See https://builds.apache.org/job/Hadoop-Hdfs-trunk/2641/
###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 8407 lines...]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO]
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [12:24 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [ 05:53 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [ 0.099 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 06:05 h
[INFO] Finished at: 2015-12-18T11:04:42+00:00
[INFO] Final Memory: 71M/800M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
30 tests failed.
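Nearly all of the failures below are java.lang.OutOfMemoryError: unable to create new native thread, plus a couple of fork failures of the same flavor (Cannot run program "bash": error=11, Resource temporarily unavailable). On Linux this usually means the user running the forked test JVMs hit its process/thread limit, not that the heap was full. A minimal way to check on the slave (a sketch, assuming a Linux slave and that the surefire forks run as the jenkins user):

    # Run as the user that owns the surefire forks (assumed: jenkins).
    ulimit -u                          # per-user limit on processes/threads
    cat /proc/sys/kernel/threads-max   # system-wide thread limit
    ps -eLf | wc -l                    # rough count of threads currently alive

If the live thread count is near the ulimit -u value, threads leaked by earlier MiniDFSCluster-based tests in the same fork are the likely culprit.
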
FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWriteWithMultipleDns[0]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWrite(TestEncryptedTransfer.java:532)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWriteWithMultipleDns(TestEncryptedTransfer.java:496)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedWriteClientAfterRestart[0]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedWriteClientAfterRestart(TestEncryptedTransfer.java:408)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedAppendRequiringBlockTransfer[0]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedAppendRequiringBlockTransfer(TestEncryptedTransfer.java:570)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadAfterNameNodeRestart[0]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadAfterNameNodeRestart(TestEncryptedTransfer.java:267)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWriteWithMultipleDns[1]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWrite(TestEncryptedTransfer.java:532)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedWriteWithMultipleDns(TestEncryptedTransfer.java:496)
FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedWriteClientAfterRestart[1]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedWriteClientAfterRestart(TestEncryptedTransfer.java:408)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedAppendRequiringBlockTransfer[1]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedAppendRequiringBlockTransfer(TestEncryptedTransfer.java:570)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadAfterNameNodeRestart[1]

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadAfterNameNodeRestart(TestEncryptedTransfer.java:267)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testClientThatDoesNotSupportEncryption[1]

Error Message:
Problem in starting http server. Server handlers failed

Stack Trace:
java.io.IOException: Problem in starting http server. Server handlers failed
	at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:860)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer.start(NameNodeHttpServer.java:142)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.startHttpServer(NameNode.java:822)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:675)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:884)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:863)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1565)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testClientThatDoesNotSupportEncryption(TestEncryptedTransfer.java:309)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedClient[1]

Error Message:
Problem in starting http server. Server handlers failed

Stack Trace:
java.io.IOException: Problem in starting http server. Server handlers failed
	at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:860)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer.start(NameNodeHttpServer.java:142)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.startHttpServer(NameNode.java:822)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:675)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:884)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:863)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1565)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testLongLivedClient(TestEncryptedTransfer.java:437)


FAILED: org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadWithAES[1]

Error Message:
Cannot run program "bash": error=11, Resource temporarily unavailable

Stack Trace:
java.io.IOException: Cannot run program "bash": error=11, Resource temporarily unavailable
	at java.lang.UNIXProcess.forkAndExec(Native Method)
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:186)
	at java.lang.ProcessImpl.start(ProcessImpl.java:130)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:1022)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:868)
	at org.apache.hadoop.util.Shell.run(Shell.java:838)
	at org.apache.hadoop.fs.DF.getFilesystem(DF.java:76)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker$CheckedVolume.<init>(NameNodeResourceChecker.java:69)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker.addDirToCheck(NameNodeResourceChecker.java:165)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeResourceChecker.<init>(NameNodeResourceChecker.java:134)
	at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.startCommonServices(FSNamesystem.java:1012)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.startCommonServices(NameNode.java:748)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:699)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:884)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:863)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1565)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestEncryptedTransfer.testEncryptedReadWithAES(TestEncryptedTransfer.java:214)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testProcessErasureCodingTasksSubmitionShouldSucceed

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.tearDown(TestRecoverStripedFile.java:99)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverOneDataBlock1

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverOneDataBlock2

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverAnyBlocks

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverOneParityBlock1

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.tearDown(TestRecoverStripedFile.java:99)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverOneParityBlock2

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverOneParityBlock3

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestRecoverStripedFile.testRecoverThreeDataBlocks

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestRecoverStripedFile.setup(TestRecoverStripedFile.java:84)


FAILED: org.apache.hadoop.hdfs.TestSetrepDecreasing.testSetrepDecreasing

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.TestSetrepIncreasing.setrep(TestSetrepIncreasing.java:74)
	at org.apache.hadoop.hdfs.TestSetrepDecreasing.testSetrepDecreasing(TestSetrepDecreasing.java:27)


FAILED: org.apache.hadoop.hdfs.security.TestDelegationTokenForProxyUser.testWebHdfsDoAs

Error Message:
test timed out after 5000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 5000 milliseconds
	at java.net.SocketInputStream.socketRead0(Native Method)
	at java.net.SocketInputStream.read(SocketInputStream.java:152)
	at java.net.SocketInputStream.read(SocketInputStream.java:122)
	at java.io.BufferedInputStream.fill(BufferedInputStream.java:235)
	at java.io.BufferedInputStream.read1(BufferedInputStream.java:275)
	at java.io.BufferedInputStream.read(BufferedInputStream.java:334)
	at sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:687)
	at sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:633)
	at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1323)
	at java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:468)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.validateResponse(WebHdfsFileSystem.java:365)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.access$200(WebHdfsFileSystem.java:91)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$FsPathOutputStreamRunner$1.close(WebHdfsFileSystem.java:820)
	at org.apache.hadoop.hdfs.security.TestDelegationTokenForProxyUser.testWebHdfsDoAs(TestDelegationTokenForProxyUser.java:180)


FAILED: org.apache.hadoop.hdfs.server.datanode.TestBlockScanner.testVolumeIteratorWithCaching

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
	at org.apache.hadoop.security.proto.SecurityProtos$TokenProto.writeTo(SecurityProtos.java:331)
	at com.google.protobuf.CodedOutputStream.writeMessageNoTag(CodedOutputStream.java:380)
	at com.google.protobuf.CodedOutputStream.writeMessage(CodedOutputStream.java:222)
	at org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$BaseHeaderProto.writeTo(DataTransferProtos.java:1772)
	at com.google.protobuf.CodedOutputStream.writeMessageNoTag(CodedOutputStream.java:380)
	at com.google.protobuf.CodedOutputStream.writeMessage(CodedOutputStream.java:222)
	at org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$ClientOperationHeaderProto.writeTo(DataTransferProtos.java:3252)
	at com.google.protobuf.CodedOutputStream.writeMessageNoTag(CodedOutputStream.java:380)
	at com.google.protobuf.CodedOutputStream.writeMessage(CodedOutputStream.java:222)
	at org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos$OpReadBlockProto.writeTo(DataTransferProtos.java:4596)
	at com.google.protobuf.AbstractMessageLite.writeDelimitedTo(AbstractMessageLite.java:90)
	at org.apache.hadoop.hdfs.protocol.datatransfer.Sender.send(Sender.java:78)
	at org.apache.hadoop.hdfs.protocol.datatransfer.Sender.readBlock(Sender.java:112)
	at org.apache.hadoop.hdfs.RemoteBlockReader2.newBlockReader(RemoteBlockReader2.java:404)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReader(BlockReaderFactory.java:842)
	at org.apache.hadoop.hdfs.BlockReaderFactory.getRemoteBlockReaderFromTcp(BlockReaderFactory.java:733)
	at org.apache.hadoop.hdfs.BlockReaderFactory.build(BlockReaderFactory.java:375)
	at org.apache.hadoop.hdfs.DFSInputStream.getBlockReader(DFSInputStream.java:659)
	at org.apache.hadoop.hdfs.DFSInputStream.blockSeekTo(DFSInputStream.java:618)
	at org.apache.hadoop.hdfs.DFSInputStream.readWithStrategy(DFSInputStream.java:889)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:941)
	at org.apache.hadoop.hdfs.DFSInputStream.read(DFSInputStream.java:714)
	at java.io.DataInputStream.readByte(DataInputStream.java:265)
	at org.apache.hadoop.hdfs.DFSTestUtil.getFirstBlock(DFSTestUtil.java:783)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockScanner$TestContext.getFileBlock(TestBlockScanner.java:140)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockScanner.testVolumeIteratorImpl(TestBlockScanner.java:174)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockScanner.testVolumeIteratorWithCaching(TestBlockScanner.java:250)


FAILED: org.apache.hadoop.hdfs.server.datanode.TestBlockScanner.testScanRateLimit

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace':
	absolute:/home/jenkins/jenkins-slave/workspace
	permissions: drwx
path '/home/jenkins/jenkins-slave':
	absolute:/home/jenkins/jenkins-slave
	permissions: drwx
path '/home/jenkins':
	absolute:/home/jenkins
	permissions: drwx
path '/home':
	absolute:/home
	permissions: dr-x
path '/':
	absolute:/
	permissions: dr-x

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk':
	absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk
	permissions: drwx
path '/home/jenkins/jenkins-slave/workspace':
	absolute:/home/jenkins/jenkins-slave/workspace
	permissions: drwx
path '/home/jenkins/jenkins-slave':
	absolute:/home/jenkins/jenkins-slave
	permissions: drwx
path '/home/jenkins':
	absolute:/home/jenkins
	permissions: drwx
path '/home':
	absolute:/home
	permissions: dr-x
path '/':
	absolute:/
	permissions: dr-x
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:834)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockScanner$TestContext.<init>(TestBlockScanner.java:96)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockScanner.testScanRateLimit(TestBlockScanner.java:439)


FAILED: org.apache.hadoop.hdfs.server.datanode.TestDataNodeReconfiguration.testAcquireWithMaxConcurrentMoversLessThanDefault

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at io.netty.util.concurrent.ThreadPerTaskExecutor.execute(ThreadPerTaskExecutor.java:33)
	at io.netty.util.concurrent.SingleThreadEventExecutor.doStartThread(SingleThreadEventExecutor.java:692)
	at io.netty.util.concurrent.SingleThreadEventExecutor.shutdownGracefully(SingleThreadEventExecutor.java:499)
	at io.netty.util.concurrent.MultithreadEventExecutorGroup.shutdownGracefully(MultithreadEventExecutorGroup.java:160)
	at io.netty.util.concurrent.AbstractEventExecutorGroup.shutdownGracefully(AbstractEventExecutorGroup.java:70)
	at org.apache.hadoop.hdfs.server.datanode.web.DatanodeHttpServer.close(DatanodeHttpServer.java:232)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.shutdown(DataNode.java:1820)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdownDataNodes(MiniDFSCluster.java:1937)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1908)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.server.datanode.TestDataNodeReconfiguration.tearDown(TestDataNodeReconfiguration.java:68)


FAILED: org.apache.hadoop.hdfs.server.datanode.TestDataNodeReconfiguration.testAcquireWithMaxConcurrentMoversGreaterThanDefault

Error Message:
unable to create new native thread

Stack Trace:
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1116)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.saveFSImageInAllDirs(FSImage.java:1080)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:159)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1072)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:370)
	at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:228)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1005)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.datanode.TestDataNodeReconfiguration.startDFSCluster(TestDataNodeReconfiguration.java:85)
	at org.apache.hadoop.hdfs.server.datanode.TestDataNodeReconfiguration.Setup(TestDataNodeReconfiguration.java:62)


FAILED: org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.testThrottling

Error Message:
test timed out after 300000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 300000 milliseconds
	at java.lang.Object.wait(Native Method)
	at java.lang.Object.wait(Object.java:503)
	at org.apache.hadoop.hdfs.DataStreamer.waitAndQueuePacket(DataStreamer.java:805)
	at org.apache.hadoop.hdfs.DFSOutputStream.enqueueCurrentPacket(DFSOutputStream.java:423)
	at org.apache.hadoop.hdfs.DFSOutputStream.enqueueCurrentPacketFull(DFSOutputStream.java:432)
	at org.apache.hadoop.hdfs.DFSOutputStream.writeChunk(DFSOutputStream.java:418)
	at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:217)
	at org.apache.hadoop.fs.FSOutputSummer.write1(FSOutputSummer.java:125)
	at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:111)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:57)
	at java.io.DataOutputStream.write(DataOutputStream.java:107)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:418)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:376)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.createFile(TestDirectoryScanner.java:104)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.testThrottling(TestDirectoryScanner.java:580)


FAILED: org.apache.hadoop.hdfs.server.namenode.TestFileContextAcl.org.apache.hadoop.hdfs.server.namenode.TestFileContextAcl

Error Message:
org/apache/hadoop/util/PlatformName

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/util/PlatformName
	at org.apache.hadoop.security.UserGroupInformation.getOSLoginModuleName(UserGroupInformation.java:378)
	at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:423)
	at org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest.<clinit>(FSAclBaseTest.java:64)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.util.PlatformName
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at org.apache.hadoop.security.UserGroupInformation.getOSLoginModuleName(UserGroupInformation.java:378)
	at org.apache.hadoop.security.UserGroupInformation.<clinit>(UserGroupInformation.java:423)
	at org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest.<clinit>(FSAclBaseTest.java:64)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:24)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)


FAILED: org.apache.hadoop.hdfs.server.namenode.TestFileContextAcl.org.apache.hadoop.hdfs.server.namenode.TestFileContextAcl

Error Message:
Could not initialize class org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest

Stack Trace:
java.lang.NoClassDefFoundError: Could not initialize class org.apache.hadoop.hdfs.server.namenode.FSAclBaseTest
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:33)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)


FAILED: org.apache.hadoop.hdfs.server.namenode.TestFsck.testFsckPermission

Error Message:
org/apache/hadoop/security/ShellBasedUnixGroupsMapping$PartialGroupNameException

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/security/ShellBasedUnixGroupsMapping$PartialGroupNameException
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at java.lang.Class.getDeclaredConstructors0(Native Method)
	at java.lang.Class.privateGetDeclaredConstructors(Class.java:2493)
	at java.lang.Class.getConstructor0(Class.java:2803)
	at java.lang.Class.getDeclaredConstructor(Class.java:2053)
	at org.apache.hadoop.util.ReflectionUtils.newInstance(ReflectionUtils.java:128)
	at org.apache.hadoop.security.Groups.<init>(Groups.java:78)
	at org.apache.hadoop.security.Groups.<init>(Groups.java:74)
	at org.apache.hadoop.security.UserGroupInformation$TestingGroups.<init>(UserGroupInformation.java:1325)
	at org.apache.hadoop.security.UserGroupInformation$TestingGroups.<init>(UserGroupInformation.java:1319)
	at org.apache.hadoop.security.UserGroupInformation.createUserForTesting(UserGroupInformation.java:1359)
	at org.apache.hadoop.hdfs.server.namenode.TestFsck.testFsckPermission(TestFsck.java:296)


FAILED: org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails

Error Message:
Wanted but not invoked:
namenodeProtocols.getStats();
-> at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails(TestRequestHedgingProxyProvider.java:78)
Actually, there were zero interactions with this mock.

Stack Trace:
org.mockito.exceptions.verification.WantedButNotInvoked:
Wanted but not invoked:
namenodeProtocols.getStats();
-> at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails(TestRequestHedgingProxyProvider.java:78)
Actually, there were zero interactions with this mock.
	at org.apache.hadoop.hdfs.server.namenode.ha.TestRequestHedgingProxyProvider.testHedgingWhenOneFails(TestRequestHedgingProxyProvider.java:78)