That means your driver and executors are not all running the same version of Spark. Are you on a standalone EC2 cluster? If so, one way to fix this is to run the following on the master node:

/root/spark-ec2/copy-dir --delete /root/spark

This syncs all of Spark (configs, jars, everything) across your cluster.
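If you want to confirm which node is out of sync (or verify the re-sync worked), here is a minimal diagnostic sketch you could paste into spark-shell on the master. It assumes `sc` is available there; the class name SerializableWritable is taken straight from your trace. It compares the serialVersionUID the driver computes for that class against the one each executor computes, so a mismatched host shows up by name. If the builds differ badly enough the probe job may itself fail with the same InvalidClassException, which is just as conclusive.

    import java.io.ObjectStreamClass
    import org.apache.spark.SerializableWritable

    // serialVersionUID of SerializableWritable as seen from the driver's classpath
    val driverUid = ObjectStreamClass
      .lookup(classOf[SerializableWritable[_]])
      .getSerialVersionUID

    // Same computation on the executors: spread a trivial job over many tasks
    // so every worker is hit at least once, and report (hostname, uid) pairs.
    val executorUids = sc.parallelize(1 to 1000, 50)
      .map { _ =>
        val host = java.net.InetAddress.getLocalHost.getHostName
        val uid = ObjectStreamClass
          .lookup(classOf[SerializableWritable[_]])
          .getSerialVersionUID
        (host, uid)
      }
      .distinct()
      .collect()

    println("driver  : " + driverUid)
    executorUids.foreach { case (host, uid) => println(host + " : " + uid) }

Any host that prints a UID different from the driver's is the one with a stale /root/spark; re-running the copy-dir command above should bring it back in sync.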
2014-05-23 15:20 GMT-07:00 Suman Somasundar <suman.somasun...@oracle.com>:

> Hi,
>
> I get the following exception when using Spark to run various programs.
>
> java.io.InvalidClassException: org.apache.spark.SerializableWritable;
> local class incompatible: stream classdesc serialVersionUID =
> 6301214776158303468, local class serialVersionUID = -7785455416944904980
>     at java.io.ObjectStreamClass.initNonProxy(ObjectStreamClass.java:604)
>     at java.io.ObjectInputStream.readNonProxyDesc(ObjectInputStream.java:1601)
>     at java.io.ObjectInputStream.readClassDesc(ObjectInputStream.java:1514)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1750)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
>     at org.apache.spark.broadcast.HttpBroadcast$.read(HttpBroadcast.scala:165)
>     at org.apache.spark.broadcast.HttpBroadcast.readObject(HttpBroadcast.scala:56)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:601)
>     at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1004)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1866)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:601)
>     at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1004)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1866)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at scala.collection.immutable.$colon$colon.readObject(List.scala:362)
>     at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
>     at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
>     at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>     at java.lang.reflect.Method.invoke(Method.java:601)
>     at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1004)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1866)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:1964)
>     at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1888)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1771)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
>     at org.apache.spark.scheduler.ResultTask$.deserializeInfo(ResultTask.scala:63)
>     at org.apache.spark.scheduler.ResultTask.readExternal(ResultTask.scala:139)
>     at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1810)
>     at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1769)
>     at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1347)
>     at java.io.ObjectInputStream.readObject(ObjectInputStream.java:369)
>     at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:40)
>     at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:62)
>     at org.apache.spark.executor.Executor$TaskRunner$$anonfun$run$1.apply$mcV$sp(Executor.scala:193)
>     at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:42)
>     at org.apache.spark.deploy.SparkHadoopUtil$$anon$1.run(SparkHadoopUtil.scala:41)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:415)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1121)
>     at org.apache.spark.deploy.SparkHadoopUtil.runAsUser(SparkHadoopUtil.scala:41)
>     at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:176)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1110)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:603)
>     at java.lang.Thread.run(Thread.java:722)
>
> What is the cause of this exception?
>
> Thanks,
> Suman.