Matt McCline created HIVE-9371:
----------------------------------

             Summary: Execution error for Parquet table and GROUP BY involving CHAR data type
                 Key: HIVE-9371
                 URL: https://issues.apache.org/jira/browse/HIVE-9371
             Project: Hive
          Issue Type: Bug
          Components: File Formats, Query Processor
            Reporter: Matt McCline
            Priority: Critical


A query involving the Parquet table format, the CHAR data type, and GROUP BY fails with a ClassCastException.

It probably fails for VARCHAR as well (an untested varchar variant is sketched after the q file below).

{noformat}
Caused by: org.apache.hadoop.hive.ql.metadata.HiveException: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.hive.serde2.io.HiveCharWritable
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processOp(GroupByOperator.java:814)
        at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:815)
        at org.apache.hadoop.hive.ql.exec.SelectOperator.processOp(SelectOperator.java:84)
        at org.apache.hadoop.hive.ql.exec.Operator.forward(Operator.java:815)
        at org.apache.hadoop.hive.ql.exec.TableScanOperator.processOp(TableScanOperator.java:95)
        at org.apache.hadoop.hive.ql.exec.MapOperator$MapOpCtx.forward(MapOperator.java:157)
        at org.apache.hadoop.hive.ql.exec.MapOperator.process(MapOperator.java:493)
        ... 10 more
Caused by: java.lang.ClassCastException: org.apache.hadoop.io.Text cannot be cast to org.apache.hadoop.hive.serde2.io.HiveCharWritable
        at org.apache.hadoop.hive.serde2.objectinspector.primitive.WritableHiveCharObjectInspector.copyObject(WritableHiveCharObjectInspector.java:104)
        at org.apache.hadoop.hive.serde2.objectinspector.ObjectInspectorUtils.copyToStandardObject(ObjectInspectorUtils.java:305)
        at org.apache.hadoop.hive.ql.exec.KeyWrapperFactory$ListKeyWrapper.deepCopyElements(KeyWrapperFactory.java:150)
        at org.apache.hadoop.hive.ql.exec.KeyWrapperFactory$ListKeyWrapper.deepCopyElements(KeyWrapperFactory.java:142)
        at org.apache.hadoop.hive.ql.exec.KeyWrapperFactory$ListKeyWrapper.copyKey(KeyWrapperFactory.java:119)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processHashAggr(GroupByOperator.java:827)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processKey(GroupByOperator.java:739)
        at org.apache.hadoop.hive.ql.exec.GroupByOperator.processOp(GroupByOperator.java:809)
        ... 16 more
{noformat}

Here is a q file:
{noformat}
SET hive.vectorized.execution.enabled=false;
drop table char_2;

create table char_2 (
  key char(10),
  value char(20)
) stored as parquet;

insert overwrite table char_2 select * from src;

select value, sum(cast(key as int)), count(*) numrows
from src
group by value
order by value asc
limit 5;

explain select value, sum(cast(key as int)), count(*) numrows
from char_2
group by value
order by value asc
limit 5;

-- should match the query from src
select value, sum(cast(key as int)), count(*) numrows
from char_2
group by value
order by value asc
limit 5;

select value, sum(cast(key as int)), count(*) numrows
from src
group by value
order by value desc
limit 5;

explain select value, sum(cast(key as int)), count(*) numrows
from char_2
group by value
order by value desc
limit 5;

-- should match the query from src
select value, sum(cast(key as int)), count(*) numrows
from char_2
group by value
order by value desc
limit 5;

drop table char_2;
{noformat}
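
For reference, here is an untested varchar variant of the same script (the table name varchar_2 is just illustrative). If VARCHAR is affected as suspected above, it should fail on the same GROUP BY path, presumably with a cast to HiveVarcharWritable instead of HiveCharWritable:
{noformat}
SET hive.vectorized.execution.enabled=false;
drop table varchar_2;

create table varchar_2 (
  key varchar(10),
  value varchar(20)
) stored as parquet;

insert overwrite table varchar_2 select * from src;

-- expected to hit the analogous ClassCastException during GROUP BY
select value, sum(cast(key as int)), count(*) numrows
from varchar_2
group by value
order by value asc
limit 5;

drop table varchar_2;
{noformat}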



