See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/2547/changes>
Changes:

[jianhe] MAPREDUCE-5485. Allow repeating job commit by extending OutputCommitter

------------------------------------------
[...truncated 6236 lines...]
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.587 sec - in org.apache.hadoop.hdfs.TestDeprecatedKeys
Running org.apache.hadoop.hdfs.TestClientReportBadBlock
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.142 sec - in org.apache.hadoop.hdfs.TestClientReportBadBlock
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 87.401 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure020
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.58 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure020
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure180
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.817 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure180
Running org.apache.hadoop.hdfs.TestEncryptionZones

Results :

Failed tests: 
  TestXAttrConfigFlag.testRemoveXAttr Expected: (an instance of java.io.IOException and exception with message a string containing "dfs.namenode.xattrs.enabled")
     but: an instance of java.io.IOException <java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper> is a java.lang.NoClassDefFoundError
Stacktrace was: java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.removeXAttr(ClientNamenodeProtocolTranslatorPB.java:1440)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:255)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
    at com.sun.proxy.$Proxy22.removeXAttr(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.removeXAttr(DFSClient.java:2767)
    at org.apache.hadoop.hdfs.DistributedFileSystem$54.doCall(DistributedFileSystem.java:2173)
    at org.apache.hadoop.hdfs.DistributedFileSystem$54.doCall(DistributedFileSystem.java:2170)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.removeXAttr(DistributedFileSystem.java:2170)
    at org.apache.hadoop.hdfs.server.namenode.TestXAttrConfigFlag.testRemoveXAttr(TestXAttrConfigFlag.java:78)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.rules.ExpectedException$ExpectedExceptionStatement.evaluate(ExpectedException.java:168)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.ipc.ProtobufHelper
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
    at java.security.AccessController.doPrivileged(Native Method)
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
    at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
    ... 40 more

  TestXAttrConfigFlag.testGetXAttrs Expected: (an instance of java.io.IOException and exception with message a string containing "dfs.namenode.xattrs.enabled")
     but: an instance of java.io.IOException <java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper> is a java.lang.NoClassDefFoundError
Stacktrace was: java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getXAttrs(ClientNamenodeProtocolTranslatorPB.java:1415)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:255)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
    at com.sun.proxy.$Proxy22.getXAttrs(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.getXAttrs(DFSClient.java:2730)
    at org.apache.hadoop.hdfs.DistributedFileSystem$51.doCall(DistributedFileSystem.java:2123)
    at org.apache.hadoop.hdfs.DistributedFileSystem$51.doCall(DistributedFileSystem.java:2120)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.getXAttrs(DistributedFileSystem.java:2120)
    at org.apache.hadoop.hdfs.server.namenode.TestXAttrConfigFlag.testGetXAttrs(TestXAttrConfigFlag.java:70)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.rules.ExpectedException$ExpectedExceptionStatement.evaluate(ExpectedException.java:168)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

  TestXAttrConfigFlag.testSetXAttr Expected: (an instance of java.io.IOException and exception with message a string containing "dfs.namenode.xattrs.enabled")
     but: an instance of java.io.IOException <java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper> is a java.lang.NoClassDefFoundError
Stacktrace was: java.lang.NoClassDefFoundError: org/apache/hadoop/ipc/ProtobufHelper
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.setXAttr(ClientNamenodeProtocolTranslatorPB.java:1399)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:255)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
    at com.sun.proxy.$Proxy22.setXAttr(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.setXAttr(DFSClient.java:2703)
    at org.apache.hadoop.hdfs.DistributedFileSystem$49.doCall(DistributedFileSystem.java:2090)
    at org.apache.hadoop.hdfs.DistributedFileSystem$49.doCall(DistributedFileSystem.java:2086)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.setXAttr(DistributedFileSystem.java:2086)
    at org.apache.hadoop.fs.FileSystem.setXAttr(FileSystem.java:2520)
    at org.apache.hadoop.hdfs.server.namenode.TestXAttrConfigFlag.testSetXAttr(TestXAttrConfigFlag.java:62)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
    at org.junit.rules.ExpectedException$ExpectedExceptionStatement.evaluate(ExpectedException.java:168)
    at org.junit.rules.RunRules.evaluate(RunRules.java:20)
    at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
    at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
    at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
    at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
    at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
    at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
    at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
    at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
    at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
    at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
    at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
    at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
    at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
    at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

Tests in error: 
  TestFileTruncate.testTruncate4Symlink:1146 » NoClassDefFound org/apache/hadoop...

Tests run: 2676, Failures: 3, Errors: 1, Skipped: 6
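Editor's note: the three TestXAttrConfigFlag failures above all report the same mismatch. The test's JUnit ExpectedException rule (visible in the stack traces as ExpectedException$ExpectedExceptionStatement) expects an IOException whose message mentions dfs.namenode.xattrs.enabled, but each xattr call instead surfaces a NoClassDefFoundError for org.apache.hadoop.ipc.ProtobufHelper, so the matcher fails. For readers unfamiliar with the "Expected: (...) but: ..." wording, the following minimal, self-contained JUnit 4 sketch shows the expectation pattern that produces it; the class name, method body and exception message here are illustrative assumptions, not the actual TestXAttrConfigFlag source.

    import java.io.IOException;

    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.ExpectedException;

    public class XAttrFlagExpectationSketch {

      // The rule that appears in the stack traces above as
      // ExpectedException$ExpectedExceptionStatement.evaluate(ExpectedException.java:168).
      @Rule
      public ExpectedException thrown = ExpectedException.none();

      @Test
      public void xattrCallShouldFailWhenFlagIsDisabled() throws Exception {
        // These two lines generate the report text "an instance of java.io.IOException
        // and exception with message a string containing dfs.namenode.xattrs.enabled".
        thrown.expect(IOException.class);
        thrown.expectMessage("dfs.namenode.xattrs.enabled");

        // In the real test this would be an xattr call against a cluster started with
        // the xattrs flag disabled; here we throw an illustrative IOException so the
        // sketch runs on its own and the expectation is satisfied.
        throw new IOException("xattrs are disabled (dfs.namenode.xattrs.enabled=false)");
      }
    }

If any other exception type reaches the rule, as the NoClassDefFoundError does in this build, the matcher rejects it and the test is reported as failed rather than passed.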
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [03:55 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [ 02:32 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [ 0.063 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:35 h
[INFO] Finished at: 2015-11-17T05:14:30+00:00
[INFO] Final Memory: 71M/853M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: ExecutionException: java.lang.RuntimeException: The forked VM terminated without properly saying goodbye. VM crash or System.exit called?
[ERROR] Command was /bin/sh -c cd <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs> && /home/jenkins/tools/java/jdk1.7.0_55/jre/bin/java -Xmx2048m -XX:MaxPermSize=768m -XX:+HeapDumpOnOutOfMemoryError -jar <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire/surefirebooter787204096498487977.jar> <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire/surefire351643180169820534tmp> <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire/surefire_2854623904433082878110tmp>
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Updating MAPREDUCE-5485
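Editor's note: the three TestXAttrConfigFlag failures point at the same root symptom, a Caused by: java.lang.ClassNotFoundException for org.apache.hadoop.ipc.ProtobufHelper (a hadoop-common class) in the forked test JVM, and the TestFileTruncate error likewise reports a NoClassDefFoundError for an org/apache/hadoop class (truncated in the summary). When reproducing locally, one quick way to confirm whether a given classpath can resolve that class is a small diagnostic like the sketch below; it is a generic, stand-alone helper written for this note, not part of the Hadoop build or test code.

    /**
     * Diagnostic sketch: reports whether a class (by default the one the failing
     * tests could not load, org.apache.hadoop.ipc.ProtobufHelper) is resolvable
     * on the classpath this JVM was started with, and which jar it came from.
     */
    public class ClasspathCheck {
      public static void main(String[] args) {
        String name = args.length > 0 ? args[0] : "org.apache.hadoop.ipc.ProtobufHelper";
        try {
          Class<?> clazz = Class.forName(name);
          // getCodeSource() may be null for JDK classes; for a jar on the classpath
          // it prints the jar location, which is what we want to verify here.
          System.out.println(name + " loaded from "
              + clazz.getProtectionDomain().getCodeSource());
        } catch (ClassNotFoundException e) {
          System.out.println(name + " is NOT on the classpath");
        }
      }
    }

Running it with the same classpath the tests use (for example, one assembled with the maven-dependency-plugin's build-classpath goal) distinguishes a genuinely missing or stale hadoop-common jar from an in-test classloading problem.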