[ https://issues.apache.org/jira/browse/HIVE-7426?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=14062934#comment-14062934 ]

Matt McCline commented on HIVE-7426:
------------------------------------

Repro:
0) Download the attached TestWithORC.zip file
1) If desired, replace /tmp/TestV1 in HiveORCCreateTable.sql with a more
suitable local location
2) Create the schema and load the data: hive -f HiveORCCreateTable.sql
3) In the Hive CLI, run:
source fail_366.sql;
(or fail_750.sql or fail_856.sql)
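
For convenience, the same steps as a single terminal session (assuming the zip contents are unpacked into the current directory):

{code}
unzip TestWithORC.zip
hive -f HiveORCCreateTable.sql
hive
hive> source fail_366.sql;
{code}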

> ClassCastException: ...IntWritable cannot be cast to ...Text involving ql.udf.generic.GenericUDFBasePad.evaluate
> ----------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-7426
>                 URL: https://issues.apache.org/jira/browse/HIVE-7426
>             Project: Hive
>          Issue Type: Bug
>            Reporter: Matt McCline
>            Assignee: Matt McCline
>         Attachments: TestWithORC.zip, fail_366.sql, fail_750.sql, fail_856.sql
>
>
> One of several failures found by Raj Bains.
> Reproduces on both M/R and Tez with vectorized execution enabled:
> {code}
> set hive.vectorized.execution.enabled=true;
> {code}
> Query:
> {code}
> SELECT `Calcs`.`datetime0` AS `none_datetime0_ok`,
>        `Calcs`.`int1` AS `none_int1_ok`,
>        `Calcs`.`key` AS `none_key_nk`,
>        CASE WHEN (`Calcs`.`datetime0` IS NOT NULL AND `Calcs`.`int1` IS NOT NULL) THEN
>          FROM_UNIXTIME(
>            UNIX_TIMESTAMP(
>              CONCAT((YEAR(`Calcs`.`datetime0`)+FLOOR((MONTH(`Calcs`.`datetime0`)+`Calcs`.`int1`)/12)),
>                CONCAT('-',
>                  CONCAT(LPAD(PMOD(MONTH(`Calcs`.`datetime0`)+`Calcs`.`int1`, 12), 2, '0'),
>                    SUBSTR(`Calcs`.`datetime0`, 8)))),
>              SUBSTR('yyyy-MM-dd HH:mm:ss', 0, LENGTH(`Calcs`.`datetime0`))),
>            'yyyy-MM-dd HH:mm:ss')
>        END AS `none_z_dateadd_month_ok`
> FROM `default`.`testv1_Calcs` `Calcs`
> GROUP BY `Calcs`.`datetime0`,
>          `Calcs`.`int1`,
>          `Calcs`.`key`,
>          CASE WHEN (`Calcs`.`datetime0` IS NOT NULL AND `Calcs`.`int1` IS NOT NULL) THEN
>            FROM_UNIXTIME(
>              UNIX_TIMESTAMP(
>                CONCAT((YEAR(`Calcs`.`datetime0`)+FLOOR((MONTH(`Calcs`.`datetime0`)+`Calcs`.`int1`)/12)),
>                  CONCAT('-',
>                    CONCAT(LPAD(PMOD(MONTH(`Calcs`.`datetime0`)+`Calcs`.`int1`, 12), 2, '0'),
>                      SUBSTR(`Calcs`.`datetime0`, 8)))),
>                SUBSTR('yyyy-MM-dd HH:mm:ss', 0, LENGTH(`Calcs`.`datetime0`))),
>              'yyyy-MM-dd HH:mm:ss')
>          END;
> {code}
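
The cast appears to originate in the LPAD(PMOD(...), 2, '0') sub-expression, which hands the integer result of PMOD directly to LPAD. If that is the trigger, a much narrower query along these lines may hit the same failure (untested sketch against the same testv1_Calcs table):

{code}
set hive.vectorized.execution.enabled=true;

SELECT LPAD(PMOD(MONTH(`Calcs`.`datetime0`) + `Calcs`.`int1`, 12), 2, '0') AS `padded_month`
FROM `default`.`testv1_Calcs` `Calcs`
GROUP BY LPAD(PMOD(MONTH(`Calcs`.`datetime0`) + `Calcs`.`int1`, 12), 2, '0');
{code}
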
> Stack Trace:
> {code}
> Caused by: java.lang.ClassCastException: org.apache.hadoop.io.IntWritable cannot be cast to org.apache.hadoop.io.Text
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFBasePad.evaluate(GenericUDFBasePad.java:65)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.stringEvaluate(GenericUDFConcat.java:189)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.evaluate(GenericUDFConcat.java:159)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.stringEvaluate(GenericUDFConcat.java:189)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.evaluate(GenericUDFConcat.java:159)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.stringEvaluate(GenericUDFConcat.java:189)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFConcat.evaluate(GenericUDFConcat.java:159)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFToUnixTimeStamp.evaluate(GenericUDFToUnixTimeStamp.java:121)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFUnixTimeStamp.evaluate(GenericUDFUnixTimeStamp.java:52)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFBridge.evaluate(GenericUDFBridge.java:177)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator$DeferredExprObject.get(ExprNodeGenericFuncEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.udf.generic.GenericUDFWhen.evaluate(GenericUDFWhen.java:78)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeGenericFuncEvaluator._evaluate(ExprNodeGenericFuncEvaluator.java:166)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:77)
>       at org.apache.hadoop.hive.ql.exec.ExprNodeEvaluator.evaluate(ExprNodeEvaluator.java:65)
>       at org.apache.hadoop.hive.ql.exec.KeyWrapperFactory$ListKeyWrapper.getNewKey(KeyWrapperFactory.java:113)
>       at org.apache.hadoop.hive.ql.exec.GroupByOperator.processOp(GroupByOperator.java:778)
> {code}
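
The innermost frame shows GenericUDFBasePad.evaluate receiving an IntWritable where it expects Text, which is consistent with the integer PMOD result reaching LPAD unconverted. Until there is a fix, explicitly casting the padded value to a string may work around the failure (untested sketch, not a proposed fix):

{code}
LPAD(CAST(PMOD(MONTH(`Calcs`.`datetime0`) + `Calcs`.`int1`, 12) AS STRING), 2, '0')
{code}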



