[ https://issues.apache.org/jira/browse/HIVE-21104?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Rajesh Balamohan updated HIVE-21104:
------------------------------------
    Status: Patch Available  (was: Open)

> PTF with nested structure throws ClassCastException
> ---------------------------------------------------
>
>                 Key: HIVE-21104
>                 URL: https://issues.apache.org/jira/browse/HIVE-21104
>             Project: Hive
>          Issue Type: Bug
>          Components: Hive
>            Reporter: Rajesh Balamohan
>            Assignee: Rajesh Balamohan
>            Priority: Major
>         Attachments: HIVE-21104.1.patch
>
> {noformat}
> DROP TABLE IF EXISTS dummy;
> CREATE TABLE dummy (i int);
> INSERT INTO TABLE dummy VALUES (1);
> DROP TABLE IF EXISTS struct_table_example;
> CREATE TABLE struct_table_example (a int, s1 struct<f1: boolean, f2: string, f3: int, f4: int>) STORED AS ORC;
> INSERT INTO TABLE struct_table_example SELECT 1, named_struct('f1', false, 'f2', 'test', 'f3', 3, 'f4', 4) FROM dummy;
> select s1.f1, s1.f2, rank() over (partition by s1.f2 order by s1.f4) from struct_table_example;
> {noformat}
> This would throw the following error
> {noformat}
> Caused by: java.lang.RuntimeException: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{"reducesinkkey0":"test","reducesinkkey1":4},"value":{"_col1":{"f1":false,"f2":"test","f3":3,"f4":4}}}
>     at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource.pushRecord(ReduceRecordSource.java:297)
>     at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordProcessor.run(ReduceRecordProcessor.java:317)
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:185)
>     ... 14 more
> Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: Hive Runtime Error while processing row (tag=0) {"key":{"reducesinkkey0":"test","reducesinkkey1":4},"value":{"_col1":{"f1":false,"f2":"test","f3":3,"f4":4}}}
>     at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource$GroupIterator.next(ReduceRecordSource.java:365)
>     at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource.pushRecord(ReduceRecordSource.java:287)
>     ... 16 more
> Caused by: java.lang.ClassCastException: org.apache.hadoop.hive.serde2.lazybinary.LazyBinaryStruct cannot be cast to org.apache.hadoop.io.IntWritable
>     at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableIntObjectInspector.getPrimitiveJavaObject(WritableIntObjectInspector.java:46)
>     at org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.copyToStandardObject(ObjectInspectorUtils.java:412)
>     at org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank.copyToStandardObject(GenericUDAFRank.java:219)
>     at org.apache.hadoop.hive.ql.udf.generic.GenericUDAFRank$GenericUDAFAbstractRankEvaluator.iterate(GenericUDAFRank.java:154)
>     at org.apache.hadoop.hive.ql.udf.generic.GenericUDAFEvaluator.aggregate(GenericUDAFEvaluator.java:192)
>     at org.apache.hadoop.hive.ql.udf.ptf.WindowingTableFunction.processRow(WindowingTableFunction.java:407)
>     at org.apache.hadoop.hive.ql.exec.PTFOperator$PTFInvocation.processRow(PTFOperator.java:325)
>     at org.apache.hadoop.hive.ql.exec.PTFOperator.process(PTFOperator.java:139)
>     at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:897)
>     at org.apache.hadoop.hive.ql.exec.SelectOperator.process(SelectOperator.java:95)
>     at org.apache.hadoop.hive.ql.exec.tez.ReduceRecordSource$GroupIterator.next(ReduceRecordSource.java:356)
>     ... 17 more
> ]], Vertex did not succeed due to OWN_TASK_FAILURE, failedTasks:1 killedTasks:0, Vertex vertex_1546783872011_263870_1_01 [Reducer 2] killed/failed due to:OWN_TASK_FAILURE]DAG did not succeed due to VERTEX_FAILURE. failedVertices:1 killedVertices:0
>     at org.apache.hadoop.hive.ql.exec.tez.TezTask.execute(TezTask.java:196)
>     at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:199)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:100)
>     at org.apache.hadoop.hive.ql.exec.TaskRunner.run(TaskRunner.java:79)
> (state=08S01,code=2)
> {noformat}

--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
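For context on the bottom of the trace: the rank() evaluator copies the ORDER BY key (s1.f4) through ObjectInspectorUtils.copyToStandardObject, and the failure is an int object inspector receiving a struct object instead of an IntWritable. Below is a minimal, hypothetical sketch of that same cast mismatch, assuming the hive-serde jar (and its Hadoop dependencies) is on the classpath; the class name RankCastRepro and the ArrayList standing in for the LazyBinaryStruct row are illustrative only and are not taken from the patch.

{noformat}
import java.util.Arrays;

import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils;
import org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.ObjectInspectorCopyOption;
import org.apache.hadoop.hive.serde2.objectinspector.primitive.PrimitiveObjectInspectorFactory;

public class RankCastRepro {
  public static void main(String[] args) {
    // Illustrative stand-in for the struct row value seen at runtime
    // (the real failure involves a LazyBinaryStruct, not an ArrayList).
    Object structLikeValue = Arrays.asList(false, "test", 3, 4);

    // Copying it through an int object inspector throws a ClassCastException
    // ("... cannot be cast to org.apache.hadoop.io.IntWritable"), the same kind
    // of mismatch the stack trace shows when the object inspector for the
    // ORDER BY key does not line up with the value it is handed.
    ObjectInspectorUtils.copyToStandardObject(
        structLikeValue,
        PrimitiveObjectInspectorFactory.writableIntObjectInspector,
        ObjectInspectorCopyOption.JAVA);
  }
}
{noformat}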