[ https://issues.apache.org/jira/browse/HIVE-19451?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
Nishant Bangarwa updated HIVE-19451:
------------------------------------
    Status: Patch Available  (was: Open)

> Druid Query Execution fails with ClassNotFoundException org.antlr.v4.runtime.CharStream
> ----------------------------------------------------------------------------------------
>
>                 Key: HIVE-19451
>                 URL: https://issues.apache.org/jira/browse/HIVE-19451
>             Project: Hive
>          Issue Type: Task
>            Reporter: Nishant Bangarwa
>            Assignee: Nishant Bangarwa
>            Priority: Major
>         Attachments: HIVE-19451.patch
>
>
> Stack trace -
> {code}
> ERROR : Status: Failed
> ERROR : Vertex failed, vertexName=Map 1, vertexId=vertex_1524814504173_1344_45_00, diagnostics=[Task failed, taskId=task_1524814504173_1344_45_00_000029, diagnostics=[TaskAttempt 0 failed, info=[Error: Error while running task ( failure ) : attempt_1524814504173_1344_45_00_000029_0:java.lang.RuntimeException: java.io.IOException: org.apache.hive.druid.com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `org.apache.hive.druid.io.druid.segment.virtual.ExpressionVirtualColumn`, problem: org/antlr/v4/runtime/CharStream
>  at [Source: (String)"{"queryType":"scan","dataSource":{"type":"table","name":"tpcds_real_bin_partitioned_orc_1000.tpcds_denormalized_druid_table_7mcd"},"intervals":{"type":"segments","segments":[{"itvl":"1998-11-30T00:00:00.000Z/1998-12-01T00:00:00.000Z","ver":"2018-05-03T11:35:22.230Z","part":0}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"resultFormat":"compactedList","batchSize":20480,"limit":9223372036854775807,"filter":{"type":"bound","dimension":"i_brand"[truncated 241 chars]; line: 1, column: 376] (through reference chain: org.apache.hive.druid.io.druid.query.scan.ScanQuery["virtualColumns"]->java.util.ArrayList[0])
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:296)
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.run(TezProcessor.java:250)
>     at org.apache.tez.runtime.LogicalIOProcessorRuntimeTask.run(LogicalIOProcessorRuntimeTask.java:374)
>     at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:73)
>     at org.apache.tez.runtime.task.TaskRunner2Callable$1.run(TaskRunner2Callable.java:61)
>     at java.security.AccessController.doPrivileged(Native Method)
>     at javax.security.auth.Subject.doAs(Subject.java:422)
>     at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1682)
>     at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:61)
>     at org.apache.tez.runtime.task.TaskRunner2Callable.callInternal(TaskRunner2Callable.java:37)
>     at org.apache.tez.common.CallableWithNdc.call(CallableWithNdc.java:36)
>     at org.apache.hadoop.hive.llap.daemon.impl.StatsRecordingThreadPool$WrappedCallable.call(StatsRecordingThreadPool.java:110)
>     at java.util.concurrent.FutureTask.run(FutureTask.java:266)
>     at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
>     at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
>     at java.lang.Thread.run(Thread.java:748)
> Caused by: java.io.IOException: org.apache.hive.druid.com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `org.apache.hive.druid.io.druid.segment.virtual.ExpressionVirtualColumn`, problem: org/antlr/v4/runtime/CharStream
>  at [Source: (String)"{"queryType":"scan","dataSource":{"type":"table","name":"tpcds_real_bin_partitioned_orc_1000.tpcds_denormalized_druid_table_7mcd"},"intervals":{"type":"segments","segments":[{"itvl":"1998-11-30T00:00:00.000Z/1998-12-01T00:00:00.000Z","ver":"2018-05-03T11:35:22.230Z","part":0}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"resultFormat":"compactedList","batchSize":20480,"limit":9223372036854775807,"filter":{"type":"bound","dimension":"i_brand"[truncated 241 chars]; line: 1, column: 376] (through reference chain: org.apache.hive.druid.io.druid.query.scan.ScanQuery["virtualColumns"]->java.util.ArrayList[0])
>     at org.apache.hadoop.hive.io.HiveIOExceptionHandlerChain.handleRecordReaderCreationException(HiveIOExceptionHandlerChain.java:97)
>     at org.apache.hadoop.hive.io.HiveIOExceptionHandlerUtil.handleRecordReaderCreationException(HiveIOExceptionHandlerUtil.java:57)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getRecordReader(HiveInputFormat.java:438)
>     at org.apache.tez.mapreduce.lib.MRReaderMapred.setupOldRecordReader(MRReaderMapred.java:157)
>     at org.apache.tez.mapreduce.lib.MRReaderMapred.setSplit(MRReaderMapred.java:83)
>     at org.apache.tez.mapreduce.input.MRInput.initFromEventInternal(MRInput.java:703)
>     at org.apache.tez.mapreduce.input.MRInput.initFromEvent(MRInput.java:662)
>     at org.apache.tez.mapreduce.input.MRInputLegacy.checkAndAwaitRecordReaderInitialization(MRInputLegacy.java:150)
>     at org.apache.tez.mapreduce.input.MRInputLegacy.init(MRInputLegacy.java:114)
>     at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.getMRInput(MapRecordProcessor.java:525)
>     at org.apache.hadoop.hive.ql.exec.tez.MapRecordProcessor.init(MapRecordProcessor.java:171)
>     at org.apache.hadoop.hive.ql.exec.tez.TezProcessor.initializeAndRunProcessor(TezProcessor.java:266)
>     ... 15 more
> Caused by: org.apache.hive.druid.com.fasterxml.jackson.databind.exc.InvalidDefinitionException: Cannot construct instance of `org.apache.hive.druid.io.druid.segment.virtual.ExpressionVirtualColumn`, problem: org/antlr/v4/runtime/CharStream
>  at [Source: (String)"{"queryType":"scan","dataSource":{"type":"table","name":"tpcds_real_bin_partitioned_orc_1000.tpcds_denormalized_druid_table_7mcd"},"intervals":{"type":"segments","segments":[{"itvl":"1998-11-30T00:00:00.000Z/1998-12-01T00:00:00.000Z","ver":"2018-05-03T11:35:22.230Z","part":0}]},"virtualColumns":[{"type":"expression","name":"vc","expression":"\"__time\"","outputType":"LONG"}],"resultFormat":"compactedList","batchSize":20480,"limit":9223372036854775807,"filter":{"type":"bound","dimension":"i_brand"[truncated 241 chars]; line: 1, column: 376] (through reference chain: org.apache.hive.druid.io.druid.query.scan.ScanQuery["virtualColumns"]->java.util.ArrayList[0])
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.exc.InvalidDefinitionException.from(InvalidDefinitionException.java:67)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.DeserializationContext.instantiationException(DeserializationContext.java:1601)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.wrapAsJsonMappingException(StdValueInstantiator.java:484)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.rewrapCtorProblem(StdValueInstantiator.java:503)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:285)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.ValueInstantiator.createFromObjectWith(ValueInstantiator.java:229)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.impl.PropertyBasedCreator.build(PropertyBasedCreator.java:195)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeUsingPropertyBased(BeanDeserializer.java:488)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromObjectUsingNonDefault(BeanDeserializerBase.java:1280)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:326)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:194)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:161)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer._deserializeTypedForId(AsPropertyTypeDeserializer.java:130)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer.deserializeTypedFromObject(AsPropertyTypeDeserializer.java:97)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.AbstractDeserializer.deserializeWithType(AbstractDeserializer.java:254)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:288)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:245)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.CollectionDeserializer.deserialize(CollectionDeserializer.java:27)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromArray(BeanDeserializerBase.java:1428)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:185)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:161)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.SettableBeanProperty.deserialize(SettableBeanProperty.java:529)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeWithErrorWrapping(BeanDeserializer.java:528)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeUsingPropertyBased(BeanDeserializer.java:417)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializerBase.deserializeFromObjectUsingNonDefault(BeanDeserializerBase.java:1280)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer.deserializeFromObject(BeanDeserializer.java:326)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer._deserializeOther(BeanDeserializer.java:194)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.BeanDeserializer.deserialize(BeanDeserializer.java:161)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer._deserializeTypedForId(AsPropertyTypeDeserializer.java:130)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.jsontype.impl.AsPropertyTypeDeserializer.deserializeTypedFromObject(AsPropertyTypeDeserializer.java:97)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.AbstractDeserializer.deserializeWithType(AbstractDeserializer.java:254)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.impl.TypeWrappedDeserializer.deserialize(TypeWrappedDeserializer.java:68)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.ObjectMapper._readMapAndClose(ObjectMapper.java:4001)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.ObjectMapper.readValue(ObjectMapper.java:2992)
>     at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader.initialize(DruidQueryRecordReader.java:104)
>     at org.apache.hadoop.hive.druid.serde.DruidQueryRecordReader.initialize(DruidQueryRecordReader.java:123)
>     at org.apache.hadoop.hive.druid.io.DruidQueryBasedInputFormat.getRecordReader(DruidQueryBasedInputFormat.java:297)
>     at org.apache.hadoop.hive.ql.io.HiveInputFormat.getRecordReader(HiveInputFormat.java:435)
>     ... 24 more
> Caused by: java.lang.NoClassDefFoundError: org/antlr/v4/runtime/CharStream
>     at org.apache.hive.druid.io.druid.segment.virtual.ExpressionVirtualColumn.<init>(ExpressionVirtualColumn.java:60)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
>     at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:62)
>     at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
>     at java.lang.reflect.Constructor.newInstance(Constructor.java:423)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.introspect.AnnotatedConstructor.call(AnnotatedConstructor.java:124)
>     at org.apache.hive.druid.com.fasterxml.jackson.databind.deser.std.StdValueInstantiator.createFromObjectWith(StdValueInstantiator.java:283)
>     ... 57 more
> Caused by: java.lang.ClassNotFoundException: org.antlr.v4.runtime.CharStream
>     at java.net.URLClassLoader.findClass(URLClassLoader.java:381)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:424)
>     at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:338)
>     at java.lang.ClassLoader.loadClass(ClassLoader.java:357)
>     ... 64 more
> {code}

--
This message was sent by Atlassian JIRA
(v7.6.3#76005)
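For readers triaging a similar failure: the innermost causes above show that Druid's ExpressionVirtualColumn constructor needs the ANTLR v4 runtime, and the task classloader cannot find org.antlr.v4.runtime.CharStream. Below is a minimal diagnostic sketch, not part of HIVE-19451.patch (the class name AntlrClasspathCheck is hypothetical), that checks whether that class is visible when run with the same classpath as the failing Hive/LLAP task.

{code:java}
// Hypothetical diagnostic helper, not part of the attached patch: verifies whether
// the ANTLR v4 runtime class required by Druid's ExpressionVirtualColumn is visible
// to the current classloader. Run with the same classpath as the failing task.
public class AntlrClasspathCheck {
  public static void main(String[] args) {
    final String antlrClass = "org.antlr.v4.runtime.CharStream";
    try {
      Class<?> clazz = Class.forName(antlrClass);
      // Class resolved: report which jar (code source) it was loaded from.
      System.out.println(antlrClass + " loaded from "
          + clazz.getProtectionDomain().getCodeSource());
    } catch (ClassNotFoundException e) {
      // Same root cause as the final "Caused by" in the stack trace above.
      System.out.println(antlrClass + " is not on the classpath: " + e);
    }
  }
}
{code}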