Need help resolving a java.lang.ClassCastException when I execute the following Spark SQL
command:
%sql insert overwrite table table2 partition(node) select * from table1
where field1 like '%google%' and node = 'DCP2'
Job aborted due to stage failure: Task 0 in stage 66.0 failed 4 times, most 
recent failure: Lost task 0.3 in stage 66.0 (TID 21164, devsb08.tmobl.com): 
java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to 
org.apache.hadoop.io.IntWritable at 
org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector.get(WritableIntObjectInspector.java:36)
 at 
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$4.apply(TableReader.scala:352)
 at 
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$14$$anonfun$apply$4.apply(TableReader.scala:352)
 at 
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$fillObject$2.apply(TableReader.scala:390)
 at 
org.apache.spark.sql.hive.HadoopTableReader$$anonfun$fillObject$2.apply(TableReader.scala:382)
 at scala.collection.Iterator$$anon$11.next(Iterator.scala:328) at 
scala.collection.Iterator$$anon$14.hasNext(Iterator.scala:389) at 
scala.collection.Iterator$$anon$11.hasNext(Iterator.scala:327) at 
scala.collection.Iterator$class.foreach(Iterator.scala:727) at 
scala.collection.AbstractIterator.foreach(Iterator.scala:1157) at 
org.apache.spark.sql.hive.execution.InsertIntoHiveTable.org$apache$spark$sql$hive$execution$InsertIntoHiveTable$$writeToFile$1(InsertIntoHiveTable.scala:101)
 at 
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$saveAsHiveFile$3.apply(InsertIntoHiveTable.scala:83)
 at 
org.apache.spark.sql.hive.execution.InsertIntoHiveTable$$anonfun$saveAsHiveFile$3.apply(InsertIntoHiveTable.scala:83)
 at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:63) at 
org.apache.spark.scheduler.Task.run(Task.scala:70) at 
org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:213) at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142) 
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617) 
at java.lang.Thread.run(Thread.java:745)
Driver stacktrace: set zeppelin.spark.sql.stacktrace = true to see the full stacktrace
P.S. "Arvind" Aravind
www.linkedin.com/in/psaravind

Reply via email to