[ 
https://issues.apache.org/jira/browse/HIVE-27161?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=17781743#comment-17781743
 ] 

Basapuram Kumar edited comment on HIVE-27161 at 11/1/23 2:07 PM:
-----------------------------------------------------------------

I am facing this issue in hive-4.0.0-alpha-1 too.

Here are the steps to repro.
{code:java}
1.
CREATE TABLE emp_zlib_3110_2038 (
`__time` TIMESTAMP,
 id int,
 name string,
 age int,
 gender string)
 STORED AS ORC
 TBLPROPERTIES ('transactional'='true',"orc.compress"="ZLIB");

2.
insert into emp_zlib_3110_2038 values(CURRENT_TIMESTAMP(),10,'basa',30,'M');

3.
CREATE EXTERNAL TABLE druid_table_0111
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
TBLPROPERTIES ("druid.datasource" = 
"basa_druid_table_01112023","orc.compress"="ZLIB")
AS 
select `__time`, id,name,age,gender from emp_zlib_3110_2038;
 {code}
The error is as follows.
{code:java}
INFO  : Status: Running (Executing on YARN cluster with App id 
application_1698753624381_0028)
----------------------------------------------------------------------------------------------
        VERTICES      MODE        STATUS  TOTAL  COMPLETED  RUNNING  PENDING  
FAILED  KILLED
----------------------------------------------------------------------------------------------
Map 1 .......... container     SUCCEEDED      1          1        0        0    
   0       0
Reducer 2 ...... container     SUCCEEDED      2          2        0        0    
   0       0
----------------------------------------------------------------------------------------------
VERTICES: 02/02  [==========================>>] 100%  ELAPSED TIME: 18.13 s
----------------------------------------------------------------------------------------------
 

INFO  : Starting task [Stage-4:DDL] in serial modeERROR : 
Failedorg.apache.hadoop.hive.ql.metadata.HiveException: 
MetaException(message:LOCATION may not be specified for Druid)        
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1313) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1318) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
at 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.createTableNonReplaceMode(CreateTableOperation.java:140)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.execute(CreateTableOperation.java:98)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:212) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]        
at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:361) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:334) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:245) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:106) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:348) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:204) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:153) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:148) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at 
org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:233)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at 
org.apache.hive.service.cli.operation.SQLOperation.access$500(SQLOperation.java:88)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:336)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_382]    
at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_382]    
at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
 ~[hadoop-common-3.2.3.3.2.2.0-1.jar:?]  
at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:356)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
~[?:1.8.0_382]       
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_382]      
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 
~[?:1.8.0_382]       
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 
~[?:1.8.0_382]       
at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]

Caused by: org.apache.hadoop.hive.metastore.api.MetaException: LOCATION may not 
be specified for Druid  
at 
org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:219)
 ~[hive-druid-handler-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1402)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1378)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1369)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_382]   
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_382] 
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_382]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_382]
at 
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:218)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]
at com.sun.proxy.$Proxy33.createTable(Unknown Source) ~[?:?]    
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_382]   
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_382]
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_382] at java.lang.reflect.Method.invoke(Method.java:498) 
~[?:1.8.0_382]      
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:4342)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at com.sun.proxy.$Proxy33.createTable(Unknown Source) ~[?:?]    
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1302) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
... 27 moreERROR : 

DDLTask failed, DDL Operation: class 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation{code}
Is there any workaround or patch to get past this state? Any help on this would be appreciated.

 


was (Author: JIRAUSER295887):
I am facing this issue in hive-4.0.0-alpha-1 too.

Here are the steps to repro.

 
{code:java}
1.
CREATE TABLE emp_zlib_3110_2038 (
`__time` TIMESTAMP,
 id int,
 name string,
 age int,
 gender string)
 STORED AS ORC
 TBLPROPERTIES ('transactional'='true',"orc.compress"="ZLIB");

2.
insert into emp_zlib_3110_2038 values(CURRENT_TIMESTAMP(),10,'basa',30,'M');

3.
CREATE EXTERNAL TABLE druid_table_0111
STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
TBLPROPERTIES ("druid.datasource" = 
"basa_druid_table_01112023","orc.compress"="ZLIB")
AS 
select `__time`, id,name,age,gender from emp_zlib_3110_2038;
 {code}
The error is as follows.

 

 
{code:java}
INFO  : Status: Running (Executing on YARN cluster with App id 
application_1698753624381_0028)
----------------------------------------------------------------------------------------------
        VERTICES      MODE        STATUS  TOTAL  COMPLETED  RUNNING  PENDING  
FAILED  KILLED
----------------------------------------------------------------------------------------------
Map 1 .......... container     SUCCEEDED      1          1        0        0    
   0       0
Reducer 2 ...... container     SUCCEEDED      2          2        0        0    
   0       0
----------------------------------------------------------------------------------------------
VERTICES: 02/02  [==========================>>] 100%  ELAPSED TIME: 18.13 s
----------------------------------------------------------------------------------------------
 

INFO  : Starting task [Stage-4:DDL] in serial modeERROR : 
Failedorg.apache.hadoop.hive.ql.metadata.HiveException: 
MetaException(message:LOCATION may not be specified for Druid)        
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1313) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1318) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
at 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.createTableNonReplaceMode(CreateTableOperation.java:140)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.execute(CreateTableOperation.java:98)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:212) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]        
at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:361) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:334) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:245) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:106) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:348) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:204) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:153) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.Driver.run(Driver.java:148) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]    
at 
org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:233)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]       
at 
org.apache.hive.service.cli.operation.SQLOperation.access$500(SQLOperation.java:88)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork$1.run(SQLOperation.java:336)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]   
at java.security.AccessController.doPrivileged(Native Method) ~[?:1.8.0_382]    
at javax.security.auth.Subject.doAs(Subject.java:422) ~[?:1.8.0_382]    
at 
org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1762)
 ~[hadoop-common-3.2.3.3.2.2.0-1.jar:?]  
at 
org.apache.hive.service.cli.operation.SQLOperation$BackgroundWork.run(SQLOperation.java:356)
 ~[hive-service-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511) 
~[?:1.8.0_382]       
at java.util.concurrent.FutureTask.run(FutureTask.java:266) ~[?:1.8.0_382]      
at 
java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149) 
~[?:1.8.0_382]       
at 
java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624) 
~[?:1.8.0_382]       
at java.lang.Thread.run(Thread.java:750) [?:1.8.0_382]

Caused by: org.apache.hadoop.hive.metastore.api.MetaException: LOCATION may not 
be specified for Druid  
at 
org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:219)
 ~[hive-druid-handler-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1402)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1378)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1369)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]     
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_382]   
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_382] 
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_382]
at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_382]
at 
org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:218)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]
at com.sun.proxy.$Proxy33.createTable(Unknown Source) ~[?:?]    
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_382]   
at 
sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
~[?:1.8.0_382]
at 
sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
 ~[?:1.8.0_382] at java.lang.reflect.Method.invoke(Method.java:498) 
~[?:1.8.0_382]      
at 
org.apache.hadoop.hive.metastore.HiveMetaStoreClient$SynchronizedHandler.invoke(HiveMetaStoreClient.java:4342)
 ~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1]      
at com.sun.proxy.$Proxy33.createTable(Unknown Source) ~[?:?]    
at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1302) 
~[hive-exec-3.1.4.3.2.2.0-1.jar:3.1.4.3.2.2.0-1] 
... 27 moreERROR : 

DDLTask failed, DDL Operation: class 
org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation{code}
Is there any workaround or patch to get past this state? Any help on this would be appreciated.

 

> MetaException when executing CTAS query in Druid storage handler
> ----------------------------------------------------------------
>
>                 Key: HIVE-27161
>                 URL: https://issues.apache.org/jira/browse/HIVE-27161
>             Project: Hive
>          Issue Type: Bug
>          Components: Druid integration
>    Affects Versions: 4.0.0-alpha-2
>            Reporter: Stamatis Zampetakis
>            Priority: Major
>
> Any kind of CTAS query targeting the Druid storage handler fails with the 
> following exception:
> {noformat}
> org.apache.hadoop.hive.ql.metadata.HiveException: 
> MetaException(message:LOCATION may not be specified for Druid)
>       at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1347) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1352) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.createTableNonReplaceMode(CreateTableOperation.java:158)
>  ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.ddl.table.create.CreateTableOperation.execute(CreateTableOperation.java:116)
>  ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.ddl.DDLTask.execute(DDLTask.java:84) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:105) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Executor.launchTask(Executor.java:354) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Executor.launchTasks(Executor.java:327) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Executor.runTasks(Executor.java:244) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Executor.execute(Executor.java:105) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:367) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:205) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:154) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.Driver.run(Driver.java:149) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:185) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:228) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:257) 
> ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd1(CliDriver.java:201) 
> ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:127) 
> ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:425) 
> ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
>       at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:356) 
> ~[hive-cli-4.0.0-SNAPSHOT.jar:?]
>       at 
> org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler.initDataset(QTestDatasetHandler.java:86)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.dataset.QTestDatasetHandler.beforeTest(QTestDatasetHandler.java:190)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.ql.qoption.QTestOptionDispatcher.beforeTest(QTestOptionDispatcher.java:79)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.apache.hadoop.hive.ql.QTestUtil.cliInit(QTestUtil.java:607) 
> ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:112)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:157) 
> ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:60)
>  ~[test-classes/:?]
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
> ~[?:1.8.0_261]
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> ~[?:1.8.0_261]
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:1.8.0_261]
>       at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_261]
>       at 
> org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:59)
>  ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
>  ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:56)
>  ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
>  ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$2$1.evaluate(CliAdapter.java:135)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) 
> ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.runners.BlockJUnit4ClassRunner$1.evaluate(BlockJUnit4ClassRunner.java:100)
>  ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:366) 
> ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:103)
>  ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:63)
>  ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.Suite.runChild(Suite.java:128) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.Suite.runChild(Suite.java:27) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$4.run(ParentRunner.java:331) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:79) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:329) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.access$100(ParentRunner.java:66) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:293) 
> ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.apache.hadoop.hive.cli.control.CliAdapter$1$1.evaluate(CliAdapter.java:95)
>  ~[hive-it-util-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at org.junit.rules.RunRules.evaluate(RunRules.java:20) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner$3.evaluate(ParentRunner.java:306) 
> ~[junit-4.13.2.jar:4.13.2]
>       at org.junit.runners.ParentRunner.run(ParentRunner.java:413) 
> ~[junit-4.13.2.jar:4.13.2]
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:365)
>  ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeWithRerun(JUnit4Provider.java:273)
>  ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:238)
>  ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:159)
>  ~[surefire-junit4-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:377)
>  ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.execute(ForkedBooter.java:138) 
> ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.run(ForkedBooter.java:465) 
> ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
>       at 
> org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:451) 
> ~[surefire-booter-3.0.0-M4.jar:3.0.0-M4]
> Caused by: org.apache.hadoop.hive.metastore.api.MetaException: LOCATION may 
> not be specified for Druid
>       at 
> org.apache.hadoop.hive.druid.DruidStorageHandler.preCreateTable(DruidStorageHandler.java:219)
>  ~[hive-druid-handler-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1459)
>  ~[hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1435)
>  ~[hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at 
> org.apache.hadoop.hive.metastore.HiveMetaStoreClient.createTable(HiveMetaStoreClient.java:1426)
>  ~[hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
> ~[?:1.8.0_261]
>       at 
> sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 
> ~[?:1.8.0_261]
>       at 
> sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
>  ~[?:1.8.0_261]
>       at java.lang.reflect.Method.invoke(Method.java:498) ~[?:1.8.0_261]
>       at 
> org.apache.hadoop.hive.metastore.RetryingMetaStoreClient.invoke(RetryingMetaStoreClient.java:218)
>  ~[hive-standalone-metastore-common-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       at com.sun.proxy.$Proxy133.createTable(Unknown Source) ~[?:?]
>       at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:1336) 
> ~[hive-exec-4.0.0-SNAPSHOT.jar:4.0.0-SNAPSHOT]
>       ... 67 more
> {noformat}
> One way of reproducing the problem is by removing the {{@Ignore}} annotation 
> from {{TestMiniDruidCliDriver}} and running:
> {noformat}
> mvn test -Dtest=TestMiniDruidCliDriver -Dqfile=druidmini_expressions.q
> {noformat}
> The druidmini_expressions.q file has {{druid_table_alltypesorc}} dataset and 
> the latter is initialized with the CTAS query outlined below:
> {code:sql}
> CREATE EXTERNAL TABLE druid_table_alltypesorc
> STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
> TBLPROPERTIES ("druid.segment.granularity" = "HOUR", 
> "druid.query.granularity" = "MINUTE")
> AS
> SELECT cast (`ctimestamp1` as timestamp with local time zone) as `__time`,
>   cstring1,
>   cstring2,
>   cdouble,
>   cfloat,
>   ctinyint,
>   csmallint,
>   cint,
>   cbigint,
>   cboolean1,
>   cboolean2,
>   cast(cint as string) as cintstring,
>   cast(cfloat as string) as cfloatstring,
>   cast(cdouble as string) as cdoublestring
>   FROM alltypesorc1 where ctimestamp1 IS NOT NULL;
> {code}
> This is a regression caused by HIVE-26771 that is likely to affect other 
> storage handlers as well.



--
This message was sent by Atlassian Jira
(v8.20.10#820010)

Reply via email to