[ https://issues.apache.org/jira/browse/HIVE-17718?page=com.atlassian.jira.plugin.system.issuetabpanels:comment-tabpanel&focusedCommentId=16196387#comment-16196387 ]
Hive QA commented on HIVE-17718:
--------------------------------

Here are the results of testing the latest attachment:
https://issues.apache.org/jira/secure/attachment/12890991/HIVE-17718.4.patch

{color:red}ERROR:{color} -1 due to build exiting with an error

Test results: https://builds.apache.org/job/PreCommit-HIVE-Build/7185/testReport
Console output: https://builds.apache.org/job/PreCommit-HIVE-Build/7185/console
Test logs: http://104.198.109.242/logs/PreCommit-HIVE-Build-7185/

Messages:
{noformat}
Executing org.apache.hive.ptest.execution.TestCheckPhase
Executing org.apache.hive.ptest.execution.PrepPhase
Tests exited with: NonZeroExitCodeException
Command 'bash /data/hiveptest/working/scratch/source-prep.sh' failed with exit status 1 and output '+ date '+%Y-%m-%d %T.%3N'
2017-10-09 00:23:59.321
+ [[ -n /usr/lib/jvm/java-8-openjdk-amd64 ]]
+ export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+ JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+ export PATH=/usr/lib/jvm/java-8-openjdk-amd64/bin/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
+ PATH=/usr/lib/jvm/java-8-openjdk-amd64/bin/:/usr/local/bin:/usr/bin:/bin:/usr/local/games:/usr/games
+ export 'ANT_OPTS=-Xmx1g -XX:MaxPermSize=256m '
+ ANT_OPTS='-Xmx1g -XX:MaxPermSize=256m '
+ export 'MAVEN_OPTS=-Xmx1g '
+ MAVEN_OPTS='-Xmx1g '
+ cd /data/hiveptest/working/
+ tee /data/hiveptest/logs/PreCommit-HIVE-Build-7185/source-prep.txt
+ [[ false == \t\r\u\e ]]
+ mkdir -p maven ivy
+ [[ git = \s\v\n ]]
+ [[ git = \g\i\t ]]
+ [[ -z master ]]
+ [[ -d apache-github-source-source ]]
+ [[ ! -d apache-github-source-source/.git ]]
+ [[ ! -d apache-github-source-source ]]
+ date '+%Y-%m-%d %T.%3N'
2017-10-09 00:23:59.324
+ cd apache-github-source-source
+ git fetch origin
+ git reset --hard HEAD
HEAD is now at caf3330 HIVE-17728: TestHCatClient should use hive.metastore.transactional.event.listeners as per recommendation (Sankar Hariappan, reviewed by Thejas Nair)
+ git clean -f -d
Removing standalone-metastore/src/gen/org/
+ git checkout master
Already on 'master'
Your branch is up-to-date with 'origin/master'.
+ git reset --hard origin/master
HEAD is now at caf3330 HIVE-17728: TestHCatClient should use hive.metastore.transactional.event.listeners as per recommendation (Sankar Hariappan, reviewed by Thejas Nair)
+ git merge --ff-only origin/master
Already up-to-date.
+ date '+%Y-%m-%d %T.%3N'
2017-10-09 00:24:00.254
+ patchCommandPath=/data/hiveptest/working/scratch/smart-apply-patch.sh
+ patchFilePath=/data/hiveptest/working/scratch/build.patch
+ [[ -f /data/hiveptest/working/scratch/build.patch ]]
+ chmod +x /data/hiveptest/working/scratch/smart-apply-patch.sh
+ /data/hiveptest/working/scratch/smart-apply-patch.sh /data/hiveptest/working/scratch/build.patch
Going to apply patch with: patch -p1
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
patching file ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
patching file spark-client/src/main/java/org/apache/hive/spark/client/BaseProtocol.java
patching file spark-client/src/main/java/org/apache/hive/spark/client/Message.java
patching file spark-client/src/main/java/org/apache/hive/spark/client/RemoteDriver.java
patching file spark-client/src/main/java/org/apache/hive/spark/client/rpc/Rpc.java
patching file spark-client/src/main/java/org/apache/hive/spark/client/rpc/RpcServer.java
+ [[ maven == \m\a\v\e\n ]]
+ rm -rf /data/hiveptest/working/maven/org/apache/hive
+ mvn -B clean install -DskipTests -T 4 -q -Dmaven.repo.local=/data/hiveptest/working/maven
protoc-jar: protoc version: 250, detected platform: linux/amd64
protoc-jar: executing: [/tmp/protoc6365283986091419098.exe, -I/data/hiveptest/working/apache-github-source-source/standalone-metastore/src/main/protobuf/org/apache/hadoop/hive/metastore, --java_out=/data/hiveptest/working/apache-github-source-source/standalone-metastore/target/generated-sources, /data/hiveptest/working/apache-github-source-source/standalone-metastore/src/main/protobuf/org/apache/hadoop/hive/metastore/metastore.proto]
[ERROR] COMPILATION ERROR :
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[89,41] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[106,38] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[111,45] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[117,13] no suitable method found for call(org.apache.hive.spark.client.rpc.TestRpc.ErrorCall)
    method org.apache.hive.spark.client.rpc.Rpc.call(org.apache.hive.spark.client.Message) is not applicable
      (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.ErrorCall cannot be converted to org.apache.hive.spark.client.Message)
    method org.apache.hive.spark.client.rpc.Rpc.<T>call(org.apache.hive.spark.client.Message,java.lang.Class<T>) is not applicable
      (cannot infer type-variable(s) T
        (actual and formal argument lists differ in length))
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[125,47] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[238,13] no suitable method found for call(org.apache.hive.spark.client.rpc.TestRpc.NotDeserializable)
    method org.apache.hive.spark.client.rpc.Rpc.call(org.apache.hive.spark.client.Message) is not applicable
      (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.NotDeserializable cannot be converted to org.apache.hive.spark.client.Message)
    method org.apache.hive.spark.client.rpc.Rpc.<T>call(org.apache.hive.spark.client.Message,java.lang.Class<T>) is not applicable
      (cannot infer type-variable(s) T
        (actual and formal argument lists differ in length))
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[256,38] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[301,37] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
  required: org.apache.hive.spark.client.Message,java.lang.Class<T>
  found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
  reason: cannot infer type-variable(s) T
    (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
DataNucleus Enhancer (version 4.1.17) for API "JDO"
DataNucleus Enhancer : Classpath
>> /usr/share/maven/boot/plexus-classworlds-2.x.jar
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-compiler-plugin:3.6.1:testCompile (default-testCompile) on project spark-client: Compilation failure: Compilation failure:
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[89,41] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[106,38] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[111,45] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[117,13] no suitable method found for call(org.apache.hive.spark.client.rpc.TestRpc.ErrorCall)
[ERROR]     method org.apache.hive.spark.client.rpc.Rpc.call(org.apache.hive.spark.client.Message) is not applicable
[ERROR]       (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.ErrorCall cannot be converted to org.apache.hive.spark.client.Message)
[ERROR]     method org.apache.hive.spark.client.rpc.Rpc.<T>call(org.apache.hive.spark.client.Message,java.lang.Class<T>) is not applicable
[ERROR]       (cannot infer type-variable(s) T
[ERROR]         (actual and formal argument lists differ in length))
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[125,47] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[238,13] no suitable method found for call(org.apache.hive.spark.client.rpc.TestRpc.NotDeserializable)
[ERROR]     method org.apache.hive.spark.client.rpc.Rpc.call(org.apache.hive.spark.client.Message) is not applicable
[ERROR]       (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.NotDeserializable cannot be converted to org.apache.hive.spark.client.Message)
[ERROR]     method org.apache.hive.spark.client.rpc.Rpc.<T>call(org.apache.hive.spark.client.Message,java.lang.Class<T>) is not applicable
[ERROR]       (cannot infer type-variable(s) T
[ERROR]         (actual and formal argument lists differ in length))
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[256,38] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] /data/hiveptest/working/apache-github-source-source/spark-client/src/test/java/org/apache/hive/spark/client/rpc/TestRpc.java:[301,37] method call in class org.apache.hive.spark.client.rpc.Rpc cannot be applied to given types;
[ERROR]   required: org.apache.hive.spark.client.Message,java.lang.Class<T>
[ERROR]   found: org.apache.hive.spark.client.rpc.TestRpc.TestMessage,java.lang.Class<org.apache.hive.spark.client.rpc.TestRpc.TestMessage>
[ERROR]   reason: cannot infer type-variable(s) T
[ERROR]     (argument mismatch; org.apache.hive.spark.client.rpc.TestRpc.TestMessage cannot be converted to org.apache.hive.spark.client.Message)
[ERROR] -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :spark-client
+ exit 1
'
{noformat}

This message is automatically generated.
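All of the compile failures above share one cause: the attached patch narrows Rpc.call so the payload must be a subtype of the new org.apache.hive.spark.client.Message, while TestRpc still passes its own standalone helper types (TestMessage, ErrorCall, NotDeserializable). As a rough, self-contained illustration only — the classes below are hypothetical stand-ins, not the actual spark-client API — the mismatch and the shape of a fix look like this:

{code}
// Illustration of the javac error pattern above. Msg, FakeRpc and the message
// classes are hypothetical stand-ins, NOT the Hive spark-client classes.
import java.util.concurrent.CompletableFuture;

class Msg {}                 // plays the role of org.apache.hive.spark.client.Message
class StandaloneMessage {}   // test helper type that does NOT extend Msg

class FakeRpc {
  // plays the role of Rpc.<T>call(Message msg, Class<T> retType)
  <T> CompletableFuture<T> call(Msg msg, Class<T> retType) {
    return CompletableFuture.completedFuture(null);
  }
}

public class RpcCallMismatchDemo {
  // A payload type that extends the message base class, so the generic call resolves.
  static class GoodMessage extends Msg {}

  public static void main(String[] args) {
    FakeRpc rpc = new FakeRpc();

    // Fails exactly like TestRpc.java:[89,41] above
    // ("method call ... cannot be applied to given types"),
    // because StandaloneMessage cannot be converted to Msg:
    // rpc.call(new StandaloneMessage(), StandaloneMessage.class);

    // Compiles once the payload type extends the message base class:
    CompletableFuture<GoodMessage> f = rpc.call(new GoodMessage(), GoodMessage.class);
    System.out.println("call resolved, future = " + f);
  }
}
{code}

Presumably the fix for the patch is along the same lines: have the TestRpc helper types extend (or be replaced by) the new Message type so the test module compiles against the changed Rpc API.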
ATTACHMENT ID: 12890991 - PreCommit-HIVE-Build

> spark-client and job monitor logging improvements
> -------------------------------------------------
>
>                 Key: HIVE-17718
>                 URL: https://issues.apache.org/jira/browse/HIVE-17718
>             Project: Hive
>          Issue Type: Bug
>          Components: Spark
>            Reporter: Sahil Takiar
>            Assignee: Sahil Takiar
>         Attachments: HIVE-17718.1.patch, HIVE-17718.2.patch, HIVE-17718.3.patch, HIVE-17718.4.patch
>
>
> Example:
> {code}
> 2017-10-05 17:47:11,881 ERROR org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor: [HiveServer2-Background-Pool: Thread-131]: Failed to monitor Job[ 2] with exception 'java.lang.InterruptedException(sleep interrupted)'
> java.lang.InterruptedException: sleep interrupted
> 	at java.lang.Thread.sleep(Native Method)
> 	at org.apache.hadoop.hive.ql.exec.spark.status.RemoteSparkJobMonitor.startMonitor(RemoteSparkJobMonitor.java:124)
> 	at org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobRef.monitorJob(RemoteSparkJobRef.java:60)
> 	at org.apache.hadoop.hive.ql.exec.spark.SparkTask.execute(SparkTask.java:111)
> 	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
> 	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:99)
> 	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2052)
> 	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1748)
> 	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1501)
> 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1285)
> 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1280)
> 	at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:236)
> 	at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:89)
> 	at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:301)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:422)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
> 	at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:314)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> 	at java.lang.Thread.run(Thread.java:748)
> 2017-10-05 17:47:11,881 WARN org.apache.hadoop.hive.ql.Driver: [HiveServer2-Handler-Pool: Thread-105]: Shutting down task : Stage-2:MAPRED
> 2017-10-05 17:47:11,882 ERROR org.apache.hadoop.hive.ql.exec.spark.status.SparkJobMonitor: [HiveServer2-Background-Pool: Thread-131]: Failed to monitor Job[ 2] with exception 'java.lang.InterruptedException(sleep interrupted)'
> java.lang.InterruptedException: sleep interrupted
> 	at java.lang.Thread.sleep(Native Method)
> 	at org.apache.hadoop.hive.ql.exec.spark.status.RemoteSparkJobMonitor.startMonitor(RemoteSparkJobMonitor.java:124)
> 	at org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobRef.monitorJob(RemoteSparkJobRef.java:60)
> 	at org.apache.hadoop.hive.ql.exec.spark.SparkTask.execute(SparkTask.java:111)
> 	at org.apache.hadoop.hive.ql.exec.Task.executeTask(Task.java:214)
> 	at org.apache.hadoop.hive.ql.exec.TaskRunner.runSequential(TaskRunner.java:99)
> 	at org.apache.hadoop.hive.ql.Driver.launchTask(Driver.java:2052)
> 	at org.apache.hadoop.hive.ql.Driver.execute(Driver.java:1748)
> 	at org.apache.hadoop.hive.ql.Driver.runInternal(Driver.java:1501)
> 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1285)
> 	at org.apache.hadoop.hive.ql.Driver.run(Driver.java:1280)
> 	at org.apache.hive.service.cli.operation.SQLOperation.runQuery(SQLOperation.java:236)
> 	at org.apache.hive.service.cli.operation.SQLOperation.access$300(SQLOperation.java:89)
> 	at org.apache.hive.service.cli.operation.SQLOperation$3$1.run(SQLOperation.java:301)
> 	at java.security.AccessController.doPrivileged(Native Method)
> 	at javax.security.auth.Subject.doAs(Subject.java:422)
> 	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1917)
> 	at org.apache.hive.service.cli.operation.SQLOperation$3.run(SQLOperation.java:314)
> 	at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
> 	at java.util.concurrent.FutureTask.run(FutureTask.java:266)
> 	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
> 	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
> 	at java.lang.Thread.run(Thread.java:748)
> {code}

--
This message was sent by Atlassian JIRA
(v6.4.14#64029)
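For context on the underlying issue: the example log above shows RemoteSparkJobMonitor's polling sleep being interrupted during query shutdown and the InterruptedException being reported twice at ERROR with a full stack trace, even though the interrupt is expected. One plausible shape for such a logging improvement — a sketch under assumptions, not the actual HIVE-17718 change; MonitorLoop and its members are hypothetical, and SLF4J is assumed to be on the classpath as it is in Hive — is to treat the interrupt as a shutdown signal:

{code}
// Sketch only: a monitor loop that treats an interrupted sleep as an expected
// shutdown signal instead of logging it as an ERROR with a full stack trace.
// MonitorLoop is hypothetical; this is not the RemoteSparkJobMonitor code.
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class MonitorLoop {
  private static final Logger LOG = LoggerFactory.getLogger(MonitorLoop.class);
  private static final long POLL_INTERVAL_MS = 1000;

  private volatile boolean done = false;

  public void run(int jobId) {
    while (!done) {
      pollJobState(jobId);
      try {
        Thread.sleep(POLL_INTERVAL_MS);
      } catch (InterruptedException e) {
        // An interrupt here normally means the query was cancelled or the task is
        // shutting down, so a single short WARN without a stack trace is enough.
        LOG.warn("Interrupted while monitoring job {}, stopping monitor", jobId);
        Thread.currentThread().interrupt();  // preserve the interrupt status for callers
        done = true;
      }
    }
  }

  private void pollJobState(int jobId) {
    // placeholder for fetching the remote job state
  }
}
{code}

The two details that matter in a sketch like this are restoring the interrupt flag so callers still observe the cancellation, and logging the condition once at WARN rather than repeatedly at ERROR.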