It looks like you are running Kafka on Windows. There are known issues with Kafka on Windows: your stack trace shows the broker failing while resizing a memory-mapped log index file, and Windows does not allow that operation while the file still has a mapped section open. You could try running it on Linux instead.
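
For reference, if you want to try the same release on a Linux box (assuming the standard kafka_2.11-0.10.1.0 tarball from the download page), the quickstart steps are roughly:

    # extract the release, start ZooKeeper, then start the broker
    tar -xzf kafka_2.11-0.10.1.0.tgz
    cd kafka_2.11-0.10.1.0
    bin/zookeeper-server-start.sh config/zookeeper.properties
    bin/kafka-server-start.sh config/server.properties

These use the default properties files shipped with the release; adjust log.dirs in config/server.properties if you want the log data somewhere other than the default directory.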
On Wed, Mar 22, 2017 at 9:24 AM, vani reddy <vani.redd...@gmail.com> wrote:
> Hi, I am getting the below error. I even restarted my system, but still the
> same error.
> I am using the version kafka_2.11-0.10.1.0
>
> java.io.IOException: The requested operation cannot be performed on a file
> with a user-mapped section open
>         at java.io.RandomAccessFile.setLength(Native Method)
>         at kafka.log.AbstractIndex$$anonfun$resize$1.apply(AbstractIndex.scala:115)
>         at kafka.log.AbstractIndex$$anonfun$resize$1.apply(AbstractIndex.scala:106)
>         at kafka.utils.CoreUtils$.inLock(CoreUtils.scala:234)
>         at kafka.log.AbstractIndex.resize(AbstractIndex.scala:106)
>         at kafka.log.AbstractIndex$$anonfun$trimToValidSize$1.apply$mcV$sp(AbstractIndex.scala:160)
>         at kafka.log.AbstractIndex$$anonfun$trimToValidSize$1.apply(AbstractIndex.scala:160)
>         at kafka.log.AbstractIndex$$anonfun$trimToValidSize$1.apply(AbstractIndex.scala:160)
>         at kafka.utils.CoreUtils$.inLock(CoreUtils.scala:234)
>         at kafka.log.AbstractIndex.trimToValidSize(AbstractIndex.scala:159)
>         at kafka.log.LogSegment.recover(LogSegment.scala:236)
>         at kafka.log.Log$$anonfun$loadSegments$4.apply(Log.scala:218)
>         at kafka.log.Log$$anonfun$loadSegments$4.apply(Log.scala:179)
>         at scala.collection.TraversableLike$WithFilter$$anonfun$foreach$1.apply(TraversableLike.scala:7
>         at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:33)
>         at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:186)
>         at scala.collection.TraversableLike$WithFilter.foreach(TraversableLike.scala:732)
>         at kafka.log.Log.loadSegments(Log.scala:179)
>         at kafka.log.Log.<init>(Log.scala:108)
>         at kafka.log.LogManager.createLog(LogManager.scala:362)
>         at kafka.cluster.Partition.getOrCreateReplica(Partition.scala:94)
>         at kafka.cluster.Partition$$anonfun$4$$anonfun$apply$2.apply(Partition.scala:174)
>         at kafka.cluster.Partition$$anonfun$4$$anonfun$apply$2.apply(Partition.scala:174)
>         at scala.collection.mutable.HashSet.foreach(HashSet.scala:78)
>         at kafka.cluster.Partition$$anonfun$4.apply(Partition.scala:174)
>         at kafka.cluster.Partition$$anonfun$4.apply(Partition.scala:168)
>         at kafka.utils.CoreUtils$.inLock(CoreUtils.scala:234)
>         at kafka.utils.CoreUtils$.inWriteLock(CoreUtils.scala:242)
>         at kafka.cluster.Partition.makeLeader(Partition.scala:168)
>         at kafka.server.ReplicaManager$$anonfun$makeLeaders$4.apply(ReplicaManager.scala:740)
>         at kafka.server.ReplicaManager$$anonfun$makeLeaders$4.apply(ReplicaManager.scala:739)
>         at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
>         at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:99)
>         at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:230)
>         at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:40)
>         at scala.collection.mutable.HashMap.foreach(HashMap.scala:99)
>         at kafka.server.ReplicaManager.makeLeaders(ReplicaManager.scala:739)
>         at kafka.server.ReplicaManager.becomeLeaderOrFollower(ReplicaManager.scala:685)
>         at kafka.server.KafkaApis.handleLeaderAndIsrRequest(KafkaApis.scala:148)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:82)
>         at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:60)
>         at java.lang.Thread.run(Thread.java:745)
>
> --
> Regards,
> Vani Reddy