[ https://issues.apache.org/jira/browse/HIVE-17856?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16258223#comment-16258223 ]
Hive QA commented on HIVE-17856:
--------------------------------

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12898359/HIVE-17856.10.patch

{color:red}ERROR:{color} -1 due to build exiting with an error

Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/7908/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/7908/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-7908/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Tests exited with: NonZeroExitCodeException
Command 'bash /data/hiveptest/working/scratch/source-prep.sh' failed with exit status 1 and output '+ date '+%Y-%m-%d %T.%3N'
2017-11-18 21:23:36.238
+ [[ -n /usr/lib/jvm/java-8-openjdk-amd64 ]]
+ export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+ JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+ export PATH=/usr/lib/jvm/java-8-openjdk-amd64/bin/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
+ PATH=/usr/lib/jvm/java-8-openjdk-amd64/bin/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
+ export 'ANT_OPTS=-Xmx1g -XX:MaxPermSize=256m '
+ ANT_OPTS='-Xmx1g -XX:MaxPermSize=256m '
+ export 'MAVEN_OPTS=-Xmx1g '
+ MAVEN_OPTS='-Xmx1g '
+ cd /data/hiveptest/working/
+ tee /data/hiveptest/logs/PreCommit-HIVE-Build-7908/source-prep.txt
+ [[ false == \t\r\u\e ]]
+ mkdir -p maven ivy
+ [[ git = \s\v\n ]]
+ [[ git = \g\i\t ]]
+ [[ -z master ]]
+ [[ -d apache-github-source-source ]]
+ [[ ! -d apache-github-source-source/.git ]]
+ [[ ! -d apache-github-source-source ]]
+ date '+%Y-%m-%d %T.%3N'
2017-11-18 21:23:36.246
+ cd apache-github-source-source
+ git fetch origin
+ git reset --hard HEAD
HEAD is now at f1698b6 HIVE-18056: CachedStore: Have a whitelist/blacklist config to allow selective caching of tables/partitions and allow read while prewarming (Vaibhav Gumashta, Daniel Dai, reviewed by Thejas Nair, Sergey Shelukhin)
+ git clean -f -d
Removing ql/src/test/queries/clientpositive/unionDistinct_3.q
Removing ql/src/test/results/clientpositive/llap/explainanalyze_2.q.out
Removing ql/src/test/results/clientpositive/llap/unionDistinct_3.q.out
+ git checkout master
Already on 'master'
Your branch is up-to-date with 'origin/master'.
+ git reset --hard origin/master
HEAD is now at f1698b6 HIVE-18056: CachedStore: Have a whitelist/blacklist config to allow selective caching of tables/partitions and allow read while prewarming (Vaibhav Gumashta, Daniel Dai, reviewed by Thejas Nair, Sergey Shelukhin)
+ git merge --ff-only origin/master
Already up-to-date.
+ date '+%Y-%m-%d %T.%3N'
2017-11-18 21:23:37.595
+ patchCommandPath=/data/hiveptest/working/scratch/smart-apply-patch.sh
+ patchFilePath=/data/hiveptest/working/scratch/build.patch
+ [[ -f /data/hiveptest/working/scratch/build.patch ]]
+ chmod +x /data/hiveptest/working/scratch/smart-apply-patch.sh
+ /data/hiveptest/working/scratch/smart-apply-patch.sh /data/hiveptest/working/scratch/build.patch
Going to apply patch with: patch -p0
patching file common/src/java/org/apache/hadoop/hive/common/JavaUtils.java
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/AbstractFileMergeOperator.java
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/FileSinkOperator.java
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/JoinOperator.java
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java
patching file ql/src/java/org/apache/hadoop/hive/ql/io/AcidUtils.java
patching file ql/src/java/org/apache/hadoop/hive/ql/io/HiveInputFormat.java
patching file ql/src/java/org/apache/hadoop/hive/ql/metadata/Hive.java
patching file ql/src/java/org/apache/hadoop/hive/ql/parse/QBParseInfo.java
patching file ql/src/java/org/apache/hadoop/hive/ql/parse/SemanticAnalyzer.java
patching file ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommandsForMmTable.java
patching file ql/src/test/org/apache/hadoop/hive/ql/TestTxnCommandsForOrcMmTable.java
patching file ql/src/test/org/apache/hadoop/hive/ql/TxnCommandsBaseForTests.java
patching file ql/src/test/org/apache/hadoop/hive/ql/exec/TestUtilities.java
patching file ql/src/test/results/clientpositive/mm_all.q.out
+ [[ maven == \m\a\v\e\n ]]
+ rm -rf /data/hiveptest/working/maven/org/apache/hive
+ mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=/data/hiveptest/working/maven
protoc-jar: protoc version: 250, detected platform: linux/amd64
protoc-jar: executing: [/tmp/protoc3196949482387587835.exe, -I/data/hiveptest/working/apache-github-source-source/standalone-metastore/src/main/protobuf/org/apache/hadoop/hive/metastore, --java_out=/data/hiveptest/working/apache-github-source-source/standalone-metastore/target/generated-sources, /data/hiveptest/working/apache-github-source-source/standalone-metastore/src/main/protobuf/org/apache/hadoop/hive/metastore/metastore.proto]
ANTLR Parser Generator Version 3.5.2
Output file /data/hiveptest/working/apache-github-source-source/standalone-metastore/target/generated-sources/org/apache/hadoop/hive/metastore/parser/FilterParser.java does not exist: must build /data/hiveptest/working/apache-github-source-source/standalone-metastore/src/main/java/org/apache/hadoop/hive/metastore/parser/Filter.g
org/apache/hadoop/hive/metastore/parser/Filter.g
log4j:WARN No appenders could be found for logger (DataNucleus.General).
log4j:WARN Please initialize the log4j system properly.
DataNucleus Enhancer (version 4.1.17) for API "JDO"
DataNucleus Enhancer : Classpath
>>  /usr/share/maven/boot/plexus-classworlds-2.x.jar
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MDatabase
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MFieldSchema
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MType
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MTable
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MConstraint
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MSerDeInfo
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MOrder
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MColumnDescriptor
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MStringList
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MStorageDescriptor
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MPartition
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MIndex
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MRole
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MRoleMap
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MGlobalPrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MDBPrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MTablePrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MPartitionPrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MTableColumnPrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MPartitionColumnPrivilege
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MPartitionEvent
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MMasterKey
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MDelegationToken
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MTableColumnStatistics
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MPartitionColumnStatistics
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MVersionTable
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MMetastoreDBProperties
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MResourceUri
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MFunction
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MNotificationLog
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MNotificationNextId
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MWMResourcePlan
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MWMPool
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MWMTrigger
ENHANCED (Persistable) : org.apache.hadoop.hive.metastore.model.MWMMapping
DataNucleus Enhancer completed with success for 35 classes. Timings : input=204 ms, enhance=215 ms, total=419 ms.
Consult the log for full details
ANTLR Parser Generator Version 3.5.2
Output file /data/hiveptest/working/apache-github-source-source/ql/target/generated-sources/antlr3/org/apache/hadoop/hive/ql/parse/HiveLexer.java does not exist: must build /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveLexer.g
org/apache/hadoop/hive/ql/parse/HiveLexer.g
Output file /data/hiveptest/working/apache-github-source-source/ql/target/generated-sources/antlr3/org/apache/hadoop/hive/ql/parse/HiveParser.java does not exist: must build /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/parse/HiveParser.g
org/apache/hadoop/hive/ql/parse/HiveParser.g
Output file /data/hiveptest/working/apache-github-source-source/ql/target/generated-sources/antlr3/org/apache/hadoop/hive/ql/parse/HintParser.java does not exist: must build /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/parse/HintParser.g
org/apache/hadoop/hive/ql/parse/HintParser.g
Generating vector expression code
Generating vector expression test code
[ERROR] COMPILATION ERROR :
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java:[784,31] method getPartitionDesc(org.apache.hadoop.hive.ql.metadata.Partition,org.apache.hadoop.hive.ql.plan.TableDesc) is already defined in class org.apache.hadoop.hive.ql.exec.Utilities
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java:[1711,22] method extractNonDpMmDir in class org.apache.hadoop.hive.ql.exec.Utilities cannot be applied to given types;
  required: java.lang.Long,int,org.apache.hadoop.fs.FileStatus[],boolean
  found: java.lang.Long,int,org.apache.hadoop.fs.FileStatus[]
  reason: actual and formal argument lists differ in length
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java:[139,38] cannot find symbol
  symbol:   method getAllMaterializedViewObjects(java.lang.String)
  location: variable db of type org.apache.hadoop.hive.ql.metadata.Hive
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.6.1:compile (default-compile) on project hive-exec: Compilation failure: Compilation failure:
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java:[784,31] method getPartitionDesc(org.apache.hadoop.hive.ql.metadata.Partition,org.apache.hadoop.hive.ql.plan.TableDesc) is already defined in class org.apache.hadoop.hive.ql.exec.Utilities
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/exec/Utilities.java:[1711,22] method extractNonDpMmDir in class org.apache.hadoop.hive.ql.exec.Utilities cannot be applied to given types;
[ERROR]   required: java.lang.Long,int,org.apache.hadoop.fs.FileStatus[],boolean
[ERROR]   found: java.lang.Long,int,org.apache.hadoop.fs.FileStatus[]
[ERROR]   reason: actual and formal argument lists differ in length
[ERROR] /data/hiveptest/working/apache-github-source-source/ql/src/java/org/apache/hadoop/hive/ql/metadata/HiveMaterializedViewsRegistry.java:[139,38] cannot find symbol
[ERROR]   symbol:   method getAllMaterializedViewObjects(java.lang.String)
[ERROR]   location: variable db of type org.apache.hadoop.hive.ql.metadata.Hive
[ERROR] -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hive-exec
+ exit 1
'
{noformat}

This message is automatically generated.

ATTACHMENT ID: 12898359 - PreCommit-HIVE-Build

> MM tables - IOW is not ACID compliant
> -------------------------------------
>
>                 Key: HIVE-17856
>                 URL: https://issues.apache.org/jira/browse/HIVE-17856
>             Project: Hive
>          Issue Type: Sub-task
>          Components: Transactions
>            Reporter: Sergey Shelukhin
>            Assignee: Steve Yeom
>              Labels: mm-gap-1
>         Attachments: HIVE-17856.1.patch, HIVE-17856.10.patch, HIVE-17856.2.patch, HIVE-17856.3.patch, HIVE-17856.4.patch, HIVE-17856.5.patch, HIVE-17856.6.patch, HIVE-17856.7.patch, HIVE-17856.8.patch, HIVE-17856.9.patch
>
>
> The following tests were removed from mm_all during "integration"... I should never have allowed such a manner of integration.
> MM logic should have been kept intact until ACID logic could catch up. Alas, here we are.
> {noformat}
> drop table iow0_mm;
> create table iow0_mm(key int) tblproperties("transactional"="true", "transactional_properties"="insert_only");
> insert overwrite table iow0_mm select key from intermediate;
> insert into table iow0_mm select key + 1 from intermediate;
> select * from iow0_mm order by key;
> insert overwrite table iow0_mm select key + 2 from intermediate;
> select * from iow0_mm order by key;
> drop table iow0_mm;
> drop table iow1_mm;
> create table iow1_mm(key int) partitioned by (key2 int) tblproperties("transactional"="true", "transactional_properties"="insert_only");
> insert overwrite table iow1_mm partition (key2)
> select key as k1, key from intermediate union all select key as k1, key from intermediate;
> insert into table iow1_mm partition (key2)
> select key + 1 as k1, key from intermediate union all select key as k1, key from intermediate;
> select * from iow1_mm order by key, key2;
> insert overwrite table iow1_mm partition (key2)
> select key + 3 as k1, key from intermediate union all select key + 4 as k1, key from intermediate;
> select * from iow1_mm order by key, key2;
> insert overwrite table iow1_mm partition (key2)
> select key + 3 as k1, key + 3 from intermediate union all select key + 2 as k1, key + 2 from intermediate;
> select * from iow1_mm order by key, key2;
> drop table iow1_mm;
> {noformat}
> {noformat}
> drop table simple_mm;
> create table simple_mm(key int) stored as orc tblproperties ("transactional"="true", "transactional_properties"="insert_only");
> insert into table simple_mm select key from intermediate;
> -insert overwrite table simple_mm select key from intermediate;
> {noformat}

--
This message was sent by Atlassian JIRA (v6.4.14#64029)
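For context on the build failure above: the three compiler errors (a method that is already defined, a call with too few arguments, and a symbol that cannot be found) are the typical signature of a patch generated against an older master that now needs a rebase. Below is a minimal, purely illustrative Java sketch of the "actual and formal argument lists differ in length" failure mode; the names Worker, process, and CallSiteExample are hypothetical and are not the actual Hive classes or methods.
{noformat}
// Illustrative sketch only: hypothetical names, not Hive code.
// Suppose master added a fourth parameter to an existing helper:
class Worker {
    static String process(Long id, int count, String[] items, boolean verbose) {
        String prefix = verbose ? "verbose:" : "";
        return prefix + id + ":" + count + ":" + items.length;
    }
}

public class CallSiteExample {
    public static void main(String[] args) {
        String[] items = {"a", "b"};

        // A call site still written against the old three-argument signature, e.g.
        //   Worker.process(1L, 2, items);
        // fails to compile with "actual and formal argument lists differ in length".

        // Supplying the newly required argument matches the current signature:
        System.out.println(Worker.process(1L, 2, items, false));
    }
}
{noformat}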