[ https://issues.apache.org/jira/browse/HIVE-19155?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16434966#comment-16434966 ]
Hive QA commented on HIVE-19155:
--------------------------------

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12918445/HIVE-19155.patch

{color:green}SUCCESS:{color} +1 due to 1 test(s) being added or modified.

{color:red}ERROR:{color} -1 due to 106 failed/errored test(s), 14069 tests executed

*Failed tests:*

{noformat}
TestBeeLineDriver - did not produce a TEST-*.xml file (likely timed out) (batchId=253)
TestDbNotificationListener - did not produce a TEST-*.xml file (likely timed out) (batchId=247)
TestDummy - did not produce a TEST-*.xml file (likely timed out) (batchId=253)
TestHCatHiveCompatibility - did not produce a TEST-*.xml file (likely timed out) (batchId=247)
TestMiniDruidCliDriver - did not produce a TEST-*.xml file (likely timed out) (batchId=253)
TestMiniDruidKafkaCliDriver - did not produce a TEST-*.xml file (likely timed out) (batchId=253)
TestNonCatCallsWithCatalog - did not produce a TEST-*.xml file (likely timed out) (batchId=217)
TestSequenceFileReadWrite - did not produce a TEST-*.xml file (likely timed out) (batchId=247)
TestTezPerfCliDriver - did not produce a TEST-*.xml file (likely timed out) (batchId=253)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[groupby_groupingset_bug] (batchId=174)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[mergejoin] (batchId=168)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[sysdb] (batchId=163)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[tez_smb_1] (batchId=171)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[tez_smb_main] (batchId=160)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[update_access_time_non_current_db] (batchId=172)
org.apache.hadoop.hive.cli.TestMiniLlapLocalCliDriver.testCliDriver[vectorized_dynamic_semijoin_reduction] (batchId=155)
org.apache.hadoop.hive.cli.TestMiniTezCliDriver.testCliDriver[explainanalyze_5] (batchId=105)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.org.apache.hadoop.hive.cli.TestNegativeCliDriver (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.org.apache.hadoop.hive.cli.TestNegativeCliDriver (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[alter_notnull_constraint_violation] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[default_constraint_invalid_default_value_type] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[insert_into_acid_notnull] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[insert_into_notnull_constraint] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[insert_multi_into_notnull] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[insert_overwrite_notnull_constraint] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_bucketmapjoin] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[smb_mapjoin_14] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[sortmerge_mapjoin_mismatch_1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_aggregator_error_2] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_publisher_error_1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[stats_publisher_error_2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_corr_in_agg] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_in_implicit_gby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[subquery_notin_implicit_gby] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_bucketed_column] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_column_list_bucketing] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_column_seqfile] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_nonexistant_column] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_partition_column2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[truncate_partition_column] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_invalid] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeall_wrong1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_likeany_wrong1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_num] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_keys_arg_type] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_map_values_arg_type] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_max] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_min] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_next_day_error_2] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_nonexistent_resource] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_printf_wrong4] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_reflect_neg] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_size_wrong_args_len] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_by_wrong1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_by_wrong3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong2] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_sort_array_wrong3] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_test_error] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_test_error_reduce] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error2] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udf_trunc_error3] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported1] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported3] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_explode_not_supported4] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_invalid_place] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported1] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[udtf_not_supported3] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union22] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[union2] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionClusterBy] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionDistributeBy] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionLimit] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionOrderBy] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unionSortBy] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin2] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin3] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[uniquejoin] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unset_table_property] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[unset_view_property] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[updateBasicStats] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_bucket_col] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_no_such_table] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_non_acid_table] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_not_acid] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_notnull_constraint] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[update_partition_col] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[view_update] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_after_orderby] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_invalid_udaf] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_leadlag_in_udaf] (batchId=96)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_ll_no_neg] (batchId=95)
org.apache.hadoop.hive.cli.TestNegativeCliDriver.testCliDriver[windowing_ll_no_over] (batchId=95)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testMetastoreVersion (batchId=227)
org.apache.hadoop.hive.metastore.TestMetastoreVersion.testVersionMatching (batchId=227)
org.apache.hadoop.hive.ql.TestAcidOnTez.testGetSplitsLocks (batchId=228)
org.apache.hadoop.hive.ql.TestMTQueries.testMTQueries1 (batchId=232)
org.apache.hive.beeline.TestBeeLineWithArgs.testEscapeCRLFInCSVOutput (batchId=235)
org.apache.hive.beeline.TestBeeLineWithArgs.testEscapeCRLFInDSVOutput (batchId=235)
org.apache.hive.beeline.TestBeeLineWithArgs.testEscapeCRLFInTSV2Output (batchId=235)
org.apache.hive.beeline.TestBeeLineWithArgs.testEscapeCRLFOffInDSVOutput (batchId=235)
org.apache.hive.jdbc.TestJdbcWithMiniLlap.testLlapInputFormatEndToEnd (batchId=238)
{noformat}

Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/10148/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/10148/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-10148/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Executing org.apache.hive.ptest.execution.YetusPhase
Executing org.apache.hive.ptest.execution.ExecutionPhase
Executing org.apache.hive.ptest.execution.ReportingPhase
Tests exited with: TestsFailedException: 106 tests failed
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12918445 - PreCommit-HIVE-Build

> Daylight saving time causes Druid inserts to fail with
> org.apache.hive.druid.io.druid.java.util.common.UOE: Cannot add overlapping
> segments
> -------------------------------------------------------------------------------------------------------------------------------------
>
>                 Key: HIVE-19155
>                 URL: https://issues.apache.org/jira/browse/HIVE-19155
>             Project: Hive
>          Issue Type: Bug
>          Components: Druid integration
>            Reporter: slim bouguerra
>            Assignee: slim bouguerra
>            Priority: Major
>         Attachments: HIVE-19155.patch
>
> If you try to insert data around the daylight saving time transition, the query fails with the following exception:
> {code}
> 2018-04-10T11:24:58,836 ERROR [065fdaa2-85f9-4e49-adaf-3dc14d51be90 main] exec.DDLTask: Failed
> org.apache.hadoop.hive.ql.metadata.HiveException: org.apache.hive.druid.io.druid.java.util.common.UOE: Cannot add overlapping segments [2015-03-08T05:00:00.000Z/2015-03-09T05:00:00.000Z and 2015-03-09T04:00:00.000Z/2015-03-10T04:00:00.000Z] with the same version [2018-04-10T11:24:48.388-07:00]
>         at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:914) ~[hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.metadata.Hive.createTable(Hive.java:919) ~[hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.DDLTask.createTable(DDLTask.java:4831) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.DDLTask.execute(DDLTask.java:394) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:205) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:97) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2443) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:2114) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1797) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1538) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1532) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:157) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.reexec.ReExecDriver.run(ReExecDriver.java:204) [hive-exec-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.cli.CliDriver.processLocalCmd(CliDriver.java:239) [hive-cli-3.1.0-SNAPSHOT.jar:?]
>         at org.apache.hadoop.hive.cli.CliDriver.processCmd(CliDriver.java:188) [hive-cli-3.1.0-SNAPSHOT.jar:?]
>         at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:402) [hive-cli-3.1.0-SNAPSHOT.jar:?]
>         at org.apache.hadoop.hive.cli.CliDriver.processLine(CliDriver.java:335) [hive-cli-3.1.0-SNAPSHOT.jar:?]
>         at org.apache.hadoop.hive.ql.QTestUtil.executeClientInternal(QTestUtil.java:1455) [hive-it-util-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.ql.QTestUtil.executeClient(QTestUtil.java:1429) [hive-it-util-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.cli.control.CoreCliDriver.runTest(CoreCliDriver.java:177) [hive-it-util-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.cli.control.CliAdapter.runTest(CliAdapter.java:104) [hive-it-util-3.1.0-SNAPSHOT.jar:3.1.0-SNAPSHOT]
>         at org.apache.hadoop.hive.cli.TestMiniDruidCliDriver.testCliDriver(TestMiniDruidCliDriver.java:59) [test-classes/:?]
>         at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) ~[?:1.8.0_92]
>         at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) ~[?:1.8.0_92]
>         at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) ~[?:1.8.0_92]
> {code}
> You can reproduce this with the following DDL:
> {code}
> create database druid_test;
> use druid_test;
> create table test_table(`timecolumn` timestamp, `userid` string, `num_l` float);
> insert into test_table values ('2015-03-08 00:00:00', 'i1-start', 4);
> insert into test_table values ('2015-03-08 23:59:59', 'i1-end', 1);
> insert into test_table values ('2015-03-09 00:00:00', 'i2-start', 4);
> insert into test_table values ('2015-03-09 23:59:59', 'i2-end', 1);
> insert into test_table values ('2015-03-10 00:00:00', 'i3-start', 2);
> insert into test_table values ('2015-03-10 23:59:59', 'i3-end', 2);
> CREATE TABLE druid_table
> STORED BY 'org.apache.hadoop.hive.druid.DruidStorageHandler'
> TBLPROPERTIES ("druid.segment.granularity" = "DAY")
> AS
> select cast(`timecolumn` as timestamp with local time zone) as `__time`, `userid`, `num_l` FROM test_table;
> {code}
> The fix is to always adjust the Druid segment identifiers to UTC; the sketch below illustrates why.
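The arithmetic behind the overlap is visible in the exception above: local midnight before the 2015-03-08 spring-forward transition falls at 05:00Z, while local midnight after it falls at 04:00Z, so a DAY segment whose end is computed as its start plus 24 hours spills one hour into the next local day. The following Joda-Time sketch reproduces that failure mode and the UTC-aligned bucketing the description proposes; the class name and the exact bucketing calls are illustrative assumptions, not code from HIVE-19155.patch.

{code}
import org.joda.time.DateTime;
import org.joda.time.DateTimeZone;
import org.joda.time.Interval;

// Hypothetical demo class; not part of the actual patch.
public class DstOverlapDemo {
  public static void main(String[] args) {
    DateTimeZone eastern = DateTimeZone.forID("America/New_York");

    // Local midnights on either side of the 2015-03-08 spring-forward transition.
    DateTime day1Start = new DateTime(2015, 3, 8, 0, 0, eastern); // 2015-03-08T05:00:00Z
    DateTime day2Start = new DateTime(2015, 3, 9, 0, 0, eastern); // 2015-03-09T04:00:00Z

    // Buggy shape: each segment ends exactly 24 hours after its start instant,
    // but the local transition day is only 23 hours long, so "day 1" runs one
    // hour past the start of "day 2".
    Interval seg1 = new Interval(day1Start, day1Start.plusHours(24));
    Interval seg2 = new Interval(day2Start, day2Start.plusHours(24));
    System.out.println(seg1.overlaps(seg2)); // true -> "Cannot add overlapping segments"

    // Fixed shape: align segment identifiers to UTC calendar days, so every
    // interval is exactly [00:00Z, next 00:00Z) and overlap is impossible.
    DateTime utc1 = day1Start.withZone(DateTimeZone.UTC).withTimeAtStartOfDay();
    DateTime utc2 = day2Start.withZone(DateTimeZone.UTC).withTimeAtStartOfDay();
    Interval fixed1 = new Interval(utc1, utc1.plusDays(1));
    Interval fixed2 = new Interval(utc2, utc2.plusDays(1));
    System.out.println(fixed1.overlaps(fixed2)); // false
  }
}
{code}

The buggy intervals cover the same instants as the pair reported in the exception (2015-03-08T05:00:00.000Z/2015-03-09T05:00:00.000Z against a segment starting at 2015-03-09T04:00:00.000Z), which is why pinning the identifiers to UTC removes the overlap.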