Hi,

I have installed Spark 1.0.2 and Shark 0.9.2 on Hadoop 2.4.1 (both compiled
from source).

spark: 1.0.2
shark: 0.9.2
hadoop: 2.4.1
java: java version "1.7.0_67"
protobuf: 2.5.0


I have tried the smoke test in Shark but got a "java.util.NoSuchElementException"
error. Can you please advise how to fix this?

shark> create table x1 (a INT);
FAILED: Hive Internal Error: java.util.NoSuchElementException(null)
14/09/01 23:04:24 [main]: ERROR shark.SharkDriver: FAILED: Hive Internal Error: java.util.NoSuchElementException(null)
java.util.NoSuchElementException
        at java.util.HashMap$HashIterator.nextEntry(HashMap.java:925)
        at java.util.HashMap$ValueIterator.next(HashMap.java:950)
        at org.apache.hadoop.hive.ql.parse.SemanticAnalyzer.genPlan(SemanticAnalyzer.java:8117)
        at shark.parse.SharkSemanticAnalyzer.analyzeInternal(SharkSemanticAnalyzer.scala:150)
        at org.apache.hadoop.hive.ql.parse.BaseSemanticAnalyzer.analyze(BaseSemanticAnalyzer.java:284)
        at shark.SharkDriver.compile(SharkDriver.scala:215)
        at org.apache.hadoop.hive.ql.Driver.compile(Driver.java:342)
        at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:977)
        at org.apache.hadoop.hive.ql.Driver.run(Driver.java:888)
        at shark.SharkCliDriver.processCmd(SharkCliDriver.scala:340)
        at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:423)
        at shark.SharkCliDriver$.main(SharkCliDriver.scala:237)
        at shark.SharkCliDriver.main(SharkCliDriver.scala)
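
In case it is relevant, this is how I check which Hive jars Shark actually picks up
(illustrative only; lib_managed is just where my sbt build retrieves dependencies,
so the exact paths may differ):

  # list the Hive jars on the Shark / Spark managed classpaths
  find "$SHARK_HOME/lib_managed" -name 'hive-*.jar' 2>/dev/null
  find "$SPARK_HOME/lib_managed" -name 'hive-*.jar' 2>/dev/null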


spark-env.sh
#!/usr/bin/env bash
export CLASSPATH="$HBASE_HOME/lib/hadoop-snappy-0.0.1-SNAPSHOT.jar"
export CLASSPATH="$CLASSPATH:$HIVE_HOME/lib/mysql-connector-java-5.1.31-bin.jar"
export JAVA_LIBRARY_PATH="$HADOOP_HOME/lib/native/Linux-amd64-64"
export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-"/etc/hadoop"}
export SPARK_CLASSPATH="$SPARK_HOME/lib_managed/jars/mysql-connector-java-5.1.31-bin.jar"
export SPARK_WORKER_MEMORY=2g
export HADOOP_HEAPSIZE=2000

spark-defaults.conf
spark.executor.memory           2048m
spark.shuffle.spill.compress    false

shark-env.sh
#!/usr/bin/env bash
export SPARK_MEM=2g
export SHARK_MASTER_MEM=2g
SPARK_JAVA_OPTS=" -Dspark.local.dir=/tmp "
SPARK_JAVA_OPTS+="-Dspark.kryoserializer.buffer.mb=10 "
SPARK_JAVA_OPTS+="-verbose:gc -XX:-PrintGCDetails -XX:+PrintGCTimeStamps "
export SPARK_JAVA_OPTS
export SHARK_EXEC_MODE=yarn
export SPARK_ASSEMBLY_JAR="$SCALA_HOME/assembly/target/scala-2.10/spark-assembly-1.0.2-hadoop2.4.1.jar"
export SHARK_ASSEMBLY_JAR="target/scala-2.10/shark_2.10-0.9.2.jar"
export HIVE_CONF_DIR="$HIVE_HOME/conf"
export SPARK_LIBPATH=$HADOOP_HOME/lib/native/
export SPARK_LIBRARY_PATH=$HADOOP_HOME/lib/native/
export SPARK_CLASSPATH="$SHARK_HOME/lib/hadoop-snappy-0.0.1-SNAPSHOT.jar:$SHARK_HOME/lib/protobuf-java-2.5.0.jar"
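
For completeness, a quick sanity check that the jars referenced above actually exist
(SHARK_ASSEMBLY_JAR is a relative path, so I assume it is meant to resolve from
$SHARK_HOME):

  ls -l "$SPARK_ASSEMBLY_JAR" \
        "$SHARK_HOME/$SHARK_ASSEMBLY_JAR" \
        "$SHARK_HOME/lib/hadoop-snappy-0.0.1-SNAPSHOT.jar" \
        "$SHARK_HOME/lib/protobuf-java-2.5.0.jar"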


Regards
Arthur
