Hi Yu,

I am able to run the Spark examples, but I am unable to run the SparkR examples (only the Pi example runs on SparkR).
Thank you,
Regards,
Suresh

On Mon, Jan 19, 2015 at 3:08 PM, Ted Yu <yuzhih...@gmail.com> wrote:

> Have you seen this thread?
> http://search-hadoop.com/m/JW1q5PgA7X
>
> What Spark release are you running?
>
> Cheers
>
> On Mon, Jan 19, 2015 at 12:04 PM, suresh <lanki.sur...@gmail.com> wrote:
>
>> I am trying to run the SparkR shell on AWS.
>>
>> I am unable to access the worker nodes' web UI.
>>
>> 15/01/19 19:57:17 ERROR TaskSchedulerImpl: Lost an executor 0 (already
>> removed): remote Akka client disassociated
>> 15/01/19 19:57:17 ERROR TaskSchedulerImpl: Lost an executor 1 (already
>> removed): remote Akka client disassociated
>> 15/01/19 19:57:17 ERROR TaskSchedulerImpl: Lost an executor 2 (already
>> removed): remote Akka client disassociated
>>
>> 15/01/19 19:57:50 ERROR Remoting: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 2439208141545036836, local class serialVersionUID = -7366074099953117729
>> java.io.InvalidClassException: org.apache.spark.storage.BlockManagerId;
>> local class incompatible: stream classdesc serialVersionUID =
>> 2439208141545036836, local class serialVersionUID = -7366074099953117729
>>         at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:617)
>>         at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1622)
>>         at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1517)
>>         at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
>>         at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1990)
>>         at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1915)
>>         at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1798)
>>         at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1350)
>>         at java.io.ObjectInputStream.readObject(ObjectInputStream.java:370)
>>         at akka.serialization.JavaSerializer$$anonfun$1.apply(Serializer.scala:136)
>>         at scala.util.DynamicVariable.withValue(DynamicVariable.scala:57)
>>         at akka.serialization.JavaSerializer.fromBinary(Serializer.scala:136)
>>         at akka.serialization.Serialization$$anonfun$deserialize$1.apply(Serialization.scala:104)
>>         at scala.util.Try$.apply(Try.scala:161)
>>         at akka.serialization.Serialization.deserialize(Serialization.scala:98)
>>         at akka.remote.MessageSerializer$.deserialize(MessageSerializer.scala:23)
>>         at akka.remote.DefaultMessageDispatcher.payload$lzycompute$1(Endpoint.scala:55)
>>         at akka.remote.DefaultMessageDispatcher.payload$1(Endpoint.scala:55)
>>         at akka.remote.DefaultMessageDispatcher.dispatch(Endpoint.scala:73)
>>         at akka.remote.EndpointReader$$anonfun$receive$2.applyOrElse(Endpoint.scala:764)
>>         at akka.actor.ActorCell.receiveMessage(ActorCell.scala:498)
>>         at akka.actor.ActorCell.invoke(ActorCell.scala:456)
>>         at akka.dispatch.Mailbox.processMailbox(Mailbox.scala:237)
>>         at akka.dispatch.Mailbox.run(Mailbox.scala:219)
>>         at akka.dispatch.ForkJoinExecutorConfigurator$AkkaForkJoinTask.exec(AbstractDispatcher.scala:386)
>>         at scala.concurrent.forkjoin.ForkJoinTask.doExec(ForkJoinTask.java:260)
>>         at scala.concurrent.forkjoin.ForkJoinPool$WorkQueue.runTask(ForkJoinPool.java:1339)
>>         at scala.concurrent.forkjoin.ForkJoinPool.runWorker(ForkJoinPool.java:1979)
>>         at scala.concurrent.forkjoin.ForkJoinWorkerThread.run(ForkJoinWorkerThread.java:107)
>>
>> [the same "ERROR Remoting: org.apache.spark.storage.BlockManagerId ...
>> local class incompatible" stack trace is logged twice more at 15/01/19 19:57:50]
>>
>> --
>> View this message in context:
>> http://apache-spark-user-list.1001560.n3.nabble.com/ERROR-TaskSchedulerImpl-Lost-an-executor-tp4566p21241.html
>> Sent from the Apache Spark User List mailing list archive at Nabble.com.
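The java.io.InvalidClassException on org.apache.spark.storage.BlockManagerId (mismatched serialVersionUID) usually means the driver side and the cluster executors are running different Spark builds, which is why the Spark release question above matters. Below is a minimal Scala sketch, not an official diagnostic, that compares the Spark version on the driver with the version baked into each executor's Spark jars; the VersionCheck object name and the application name are illustrative.

import org.apache.spark.{SparkConf, SparkContext}

// Illustrative check: print the Spark version seen by the driver and by the
// executors. A serialVersionUID mismatch on BlockManagerId usually points to
// the two sides running different Spark builds.
object VersionCheck {
  def main(args: Array[String]): Unit = {
    val sc = new SparkContext(new SparkConf().setAppName("version-check"))

    // Version of the Spark jars on the driver.
    println(s"Driver Spark version: ${sc.version}")

    // Version compiled into the Spark jars loaded on each executor.
    val executorVersions = sc.parallelize(1 to 100)
      .map(_ => org.apache.spark.SPARK_VERSION)
      .distinct()
      .collect()
    println(s"Executor Spark version(s): ${executorVersions.mkString(", ")}")

    sc.stop()
  }
}

If the two sides match, both lines print the same version; if they differ, the job will typically fail with the same kind of serialization error, which is itself confirmation that the Spark build used to launch the SparkR shell should be aligned with the one deployed on the AWS cluster.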