1. My Hadoop version is 2.3.0.

2. I built the Spark assembly with:

   SPARK_HADOOP_VERSION=2.3.0 SPARK_YARN=true sbt/sbt assembly

3. I launched spark-shell in yarn-client mode with:

   SPARK_YARN_MODE=true \
   SPARK_JAR=$SPARK_HOME/assembly/target/scala-2.10/spark-assembly-0.9.1-hadoop2.3.0.jar \
   SPARK_YARN_APP_JAR=$SPARK_HOME/examples/target/scala-2.10/spark-examples-assembly-0.9.1.jar \
   MASTER=yarn-client \
   $SPARK_HOME/bin/spark-shell

The shell then fails with the following exception:
java.lang.NullPointerException
        at scala.collection.mutable.ArrayOps$ofRef$.length$extension(ArrayOps.scala:114)
        at scala.collection.mutable.ArrayOps$ofRef.length(ArrayOps.scala:114)
        at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:32)
        at scala.collection.mutable.ArrayOps$ofRef.foreach(ArrayOps.scala:108)
        at org.apache.spark.deploy.yarn.Client$.populateHadoopClasspath(Client.scala:498)
        at org.apache.spark.deploy.yarn.Client$.populateClasspath(Client.scala:519)
        at org.apache.spark.deploy.yarn.Client.setupLaunchEnv(Client.scala:333)
        at org.apache.spark.deploy.yarn.Client.runApp(Client.scala:94)
        at org.apache.spark.scheduler.cluster.YarnClientSchedulerBackend.start(YarnClientSchedulerBackend.scala:78)
        at org.apache.spark.scheduler.TaskSchedulerImpl.start(TaskSchedulerImpl.scala:125)
        at org.apache.spark.SparkContext.<init>(SparkContext.scala:200)
        at org.apache.spark.repl.SparkILoop.createSparkContext(SparkILoop.scala:959)
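In case it helps with the diagnosis: if I read the top frames correctly, the NPE comes from iterating over an array that is null inside Client.populateHadoopClasspath (the ArrayOps/IndexedSeqOptimized frames). Below is only a minimal Scala sketch of that failure pattern, not Spark code; classpathEntries is my own placeholder name:

object NullClasspathSketch {
  def main(args: Array[String]): Unit = {
    // Placeholder for whatever classpath array populateHadoopClasspath walks;
    // assume it ends up null in my environment.
    val classpathEntries: Array[String] = null
    // The implicit ArrayOps wrapper accepts the null array, but foreach calls
    // length on it and throws a NullPointerException.
    classpathEntries.foreach(entry => println(entry))
  }
}

Compiled against Scala 2.10 this seems to produce the same ArrayOps$ofRef.length / IndexedSeqOptimized$class.foreach frames as in my trace.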