It looks like you are using an incorrect configuration file, which caused Spark to fail while parsing the configuration values from it. The `URISyntaxException: Relative path in absolute URI: ${system:java.io.tmpdir}` in your stack trace suggests that your hive-site.xml contains unresolved `${system:...}` placeholders; try replacing them with absolute paths (e.g. `/tmp/hive`).
Thanks Best Regards On Mon, Dec 21, 2015 at 3:35 PM, Divya Gehlot <divya.htco...@gmail.com> wrote: > Hi, > I am trying to configure spark for hive context (Please dont get mistaken > with hive on spark ) > I placed hive-site.xml in spark/CONF_DIR > Now when I run spark-shell I am getting below error > Version which I am using > > > > > *Hadoop 2.6.2 Spark 1.5.2 Hive 1.2.1 * > > > Welcome to >> ____ __ >> / __/__ ___ _____/ /__ >> _\ \/ _ \/ _ `/ __/ '_/ >> /___/ .__/\_,_/_/ /_/\_\ version 1.5.2 >> /_/ >> >> Using Scala version 2.10.4 (Java HotSpot(TM) 64-Bit Server VM, Java >> 1.8.0_66) >> Type in expressions to have them evaluated. >> Type :help for more information. >> Spark context available as sc. >> java.lang.RuntimeException: java.lang.IllegalArgumentException: >> java.net.URISyntaxException: Relative path in absolute URI: >> ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D >> at >> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522) >> at >> org.apache.spark.sql.hive.client.ClientWrapper.<init>(ClientWrapper.scala:171) >> at >> org.apache.spark.sql.hive.HiveContext.executionHive$lzycompute(HiveContext.scala:162) >> at >> org.apache.spark.sql.hive.HiveContext.executionHive(HiveContext.scala:160) >> at org.apache.spark.sql.hive.HiveContext.<init>(HiveContext.scala:167) >> at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native >> Method) >> at >> sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62) >> at >> sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45) >> at java.lang.reflect.Constructor.newInstance(Constructor.java:422) >> at >> org.apache.spark.repl.SparkILoop.createSQLContext(SparkILoop.scala:1028) >> at $iwC$$iwC.<init>(<console>:9) >> at $iwC.<init>(<console>:18) >> at <init>(<console>:20) >> at .<init>(<console>:24) >> at .<clinit>(<console>) >> at .<init>(<console>:7) >> at .<clinit>(<console>) >> at $print(<console>) >> at 
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) >> at >> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) >> at >> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) >> at java.lang.reflect.Method.invoke(Method.java:497) >> at >> org.apache.spark.repl.SparkIMain$ReadEvalPrint.call(SparkIMain.scala:1065) >> at >> org.apache.spark.repl.SparkIMain$Request.loadAndRun(SparkIMain.scala:1340) >> at >> org.apache.spark.repl.SparkIMain.loadAndRunReq$1(SparkIMain.scala:840) >> at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:871) >> at org.apache.spark.repl.SparkIMain.interpret(SparkIMain.scala:819) >> at >> org.apache.spark.repl.SparkILoop.reallyInterpret$1(SparkILoop.scala:857) >> at >> org.apache.spark.repl.SparkILoop.interpretStartingWith(SparkILoop.scala:902) >> at org.apache.spark.repl.SparkILoop.command(SparkILoop.scala:814) >> at >> org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:132) >> at >> org.apache.spark.repl.SparkILoopInit$$anonfun$initializeSpark$1.apply(SparkILoopInit.scala:124) >> at >> org.apache.spark.repl.SparkIMain.beQuietDuring(SparkIMain.scala:324) >> at >> org.apache.spark.repl.SparkILoopInit$class.initializeSpark(SparkILoopInit.scala:124) >> at >> org.apache.spark.repl.SparkILoop.initializeSpark(SparkILoop.scala:64) >> at >> org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1$$anonfun$apply$mcZ$sp$5.apply$mcV$sp(SparkILoop.scala:974) >> at >> org.apache.spark.repl.SparkILoopInit$class.runThunks(SparkILoopInit.scala:159) >> at org.apache.spark.repl.SparkILoop.runThunks(SparkILoop.scala:64) >> at >> org.apache.spark.repl.SparkILoopInit$class.postInitialization(SparkILoopInit.scala:108) >> at >> org.apache.spark.repl.SparkILoop.postInitialization(SparkILoop.scala:64) >> at >> 
org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply$mcZ$sp(SparkILoop.scala:991) >> at >> org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945) >> at >> org.apache.spark.repl.SparkILoop$$anonfun$org$apache$spark$repl$SparkILoop$$process$1.apply(SparkILoop.scala:945) >> at >> scala.tools.nsc.util.ScalaClassLoader$.savingContextLoader(ScalaClassLoader.scala:135) >> at org.apache.spark.repl.SparkILoop.org >> $apache$spark$repl$SparkILoop$$process(SparkILoop.scala:945) >> at org.apache.spark.repl.SparkILoop.process(SparkILoop.scala:1059) >> at org.apache.spark.repl.Main$.main(Main.scala:31) >> at org.apache.spark.repl.Main.main(Main.scala) >> at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) >> at >> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) >> at >> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) >> at java.lang.reflect.Method.invoke(Method.java:497) >> at >> org.apache.spark.deploy.SparkSubmit$.org$apache$spark$deploy$SparkSubmit$$runMain(SparkSubmit.scala:674) >> at >> org.apache.spark.deploy.SparkSubmit$.doRunMain$1(SparkSubmit.scala:180) >> at org.apache.spark.deploy.SparkSubmit$.submit(SparkSubmit.scala:205) >> at org.apache.spark.deploy.SparkSubmit$.main(SparkSubmit.scala:120) >> at org.apache.spark.deploy.SparkSubmit.main(SparkSubmit.scala) >> Caused by: java.lang.IllegalArgumentException: >> java.net.URISyntaxException: Relative path in absolute URI: >> ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D >> at org.apache.hadoop.fs.Path.initialize(Path.java:206) >> at org.apache.hadoop.fs.Path.<init>(Path.java:172) >> at >> org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:563) >> at >> org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508) >> ... 
56 more >> Caused by: java.net.URISyntaxException: Relative path in absolute URI: >> ${system:java.io.tmpdir%7D/$%7Bsystem:user.name%7D >> at java.net.URI.checkPath(URI.java:1823) >> at java.net.URI.<init>(URI.java:745) >> at org.apache.hadoop.fs.Path.initialize(Path.java:203) >> ... 59 more >> >> <console>:10: error: not found: value sqlContext >> import sqlContext.implicits._ >> ^ >> <console>:10: error: not found: value sqlContext >> import sqlContext.sql >> > >