See https://builds.apache.org/job/Hadoop-Hdfs-trunk/3131/
###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 8728 lines...]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target
[INFO]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO]
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO]
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO]
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO]
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:06 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [ 01:17 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [ 0.116 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:22 h
[INFO] Finished at: 2016-05-12T12:47:25+00:00
[INFO] Final Memory: 71M/900M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR]
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR]
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR]
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any


###################################################################################
############################## FAILED TESTS (if any) ##############################
12 tests failed.

FAILED: org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
    at java.lang.Thread.sleep(Native Method)
    at org.apache.hadoop.hdfs.DFSOutputStream.completeFile(DFSOutputStream.java:825)
    at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:784)
    at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:755)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
    at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:101)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:430)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:379)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:372)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:365)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncAPI(TestAsyncDFSRename.java:328)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI(TestAsyncDFSRename.java:289)
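A note on the two "test timed out after 60000 milliseconds" failures in this report: that message comes from JUnit 4's per-test timeout (the FailOnTimeout frames visible in the traces further down), which runs the test body on a separate thread and fails the test once the limit expires. A minimal sketch of the mechanism; the class name and test body are hypothetical, not the actual TestAsyncDFSRename code:

    import org.junit.Test;

    public class TimeoutSketch {
      // JUnit 4 executes the annotated method on its own thread and fails it
      // with "test timed out after 60000 milliseconds" if it has not
      // returned after 60 s.
      @Test(timeout = 60000)
      public void testSlowWrite() throws Exception {
        Thread.sleep(70000); // hypothetical stand-in for a stuck DFS close()
      }
    }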

FAILED: org.apache.hadoop.hdfs.TestAsyncDFSRename.testConservativeConcurrentAsyncAPI

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2 permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk permissions: drwx
path '/home/jenkins/jenkins-slave/workspace': absolute:/home/jenkins/jenkins-slave/workspace permissions: drwx
path '/home/jenkins/jenkins-slave': absolute:/home/jenkins/jenkins-slave permissions: drwx
path '/home/jenkins': absolute:/home/jenkins permissions: drwx
path '/home': absolute:/home permissions: dr-x
path '/': absolute:/ permissions: dr-x

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2 permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk permissions: drwx
path '/home/jenkins/jenkins-slave/workspace': absolute:/home/jenkins/jenkins-slave/workspace permissions: drwx
path '/home/jenkins/jenkins-slave': absolute:/home/jenkins/jenkins-slave permissions: drwx
path '/home/jenkins': absolute:/home/jenkins permissions: drwx
path '/home': absolute:/home permissions: dr-x
path '/': absolute:/ permissions: dr-x
    at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:834)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
    at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncAPI(TestAsyncDFSRename.java:312)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.testConservativeConcurrentAsyncAPI(TestAsyncDFSRename.java:284)
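Both "Cannot remove data directory" failures happen during cluster setup rather than in the rename logic itself: the traces end in MiniDFSCluster.Builder.build(), which deletes and recreates the DataNode storage directories under the workspace before starting the cluster, and here the leftover directories could not be removed. For reference, a minimal sketch of the setup/teardown pattern around that builder; the node count is illustrative, not the test's actual value:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.HdfsConfiguration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new HdfsConfiguration();
        // build() wipes and recreates the per-test data directories; that is
        // the step that threw the IOException above.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(3) // illustrative
            .build();
        try {
          // exercise cluster.getFileSystem() here
        } finally {
          cluster.shutdown(); // frees the data directories for the next test
        }
      }
    }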

FAILED: org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
    at java.lang.Object.wait(Native Method)
    at org.apache.hadoop.hdfs.DataStreamer.waitForAckedSeqno(DataStreamer.java:768)
    at org.apache.hadoop.hdfs.DFSOutputStream.flushInternal(DFSOutputStream.java:697)
    at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:778)
    at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:755)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
    at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:101)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:430)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:379)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:372)
    at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:365)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncRenameWithOverwrite(TestAsyncDFSRename.java:225)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite(TestAsyncDFSRename.java:199)


FAILED: org.apache.hadoop.hdfs.TestAsyncDFSRename.testCallGetReturnValueMultipleTimes

Error Message:
Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2 permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk permissions: drwx
path '/home/jenkins/jenkins-slave/workspace': absolute:/home/jenkins/jenkins-slave/workspace permissions: drwx
path '/home/jenkins/jenkins-slave': absolute:/home/jenkins/jenkins-slave permissions: drwx
path '/home/jenkins': absolute:/home/jenkins permissions: drwx
path '/home': absolute:/home permissions: dr-x
path '/': absolute:/ permissions: dr-x

Stack Trace:
java.io.IOException: Cannot remove data directory: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2 permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project permissions: drwx
path '/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk': absolute:/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk permissions: drwx
path '/home/jenkins/jenkins-slave/workspace': absolute:/home/jenkins/jenkins-slave/workspace permissions: drwx
path '/home/jenkins/jenkins-slave': absolute:/home/jenkins/jenkins-slave permissions: drwx
path '/home/jenkins': absolute:/home/jenkins permissions: drwx
path '/home': absolute:/home permissions: dr-x
path '/': absolute:/ permissions: dr-x
    at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:834)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
    at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
    at org.apache.hadoop.hdfs.TestAsyncDFSRename.testCallGetReturnValueMultipleTimes(TestAsyncDFSRename.java:133)


FAILED: org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure190.test1

Error Message:
failed, dn=1, length=4128768
java.io.IOException: Failed at i=3538943
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:443)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:419)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:322)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.run(TestDFSStripedOutputStreamWithFailure.java:527)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.test1(TestDFSStripedOutputStreamWithFailure.java:531)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
Caused by: java.io.IOException: Data streamers failed while creating new block streams: [#1: failed, blk_-9223372036854775759_1004, #3: failed, blk_-9223372036854775757_1004, #4: failed, blk_-9223372036854775756_1004, #7: failed, blk_-9223372036854775753_1004, #6: failed, blk_-9223372036854775754_1004, #5: failed, blk_-9223372036854775755_1004]. There are not enough healthy streamers.
    at org.apache.hadoop.hdfs.DFSStripedOutputStream.checkStreamerFailures(DFSStripedOutputStream.java:631)
    at org.apache.hadoop.hdfs.DFSStripedOutputStream.writeChunk(DFSStripedOutputStream.java:547)
    at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:217)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:164)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:145)
    at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:79)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:48)
    at java.io.DataOutputStream.write(DataOutputStream.java:88)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:441)
    ... 13 more

Stack Trace:
java.lang.AssertionError: failed, dn=1, length=4128768
java.io.IOException: Failed at i=3538943
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:443)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:419)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:322)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.run(TestDFSStripedOutputStreamWithFailure.java:527)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.test1(TestDFSStripedOutputStreamWithFailure.java:531)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
Caused by: java.io.IOException: Data streamers failed while creating new block streams: [#1: failed, blk_-9223372036854775759_1004, #3: failed, blk_-9223372036854775757_1004, #4: failed, blk_-9223372036854775756_1004, #7: failed, blk_-9223372036854775753_1004, #6: failed, blk_-9223372036854775754_1004, #5: failed, blk_-9223372036854775755_1004]. There are not enough healthy streamers.
    at org.apache.hadoop.hdfs.DFSStripedOutputStream.checkStreamerFailures(DFSStripedOutputStream.java:631)
    at org.apache.hadoop.hdfs.DFSStripedOutputStream.writeChunk(DFSStripedOutputStream.java:547)
    at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:217)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:164)
    at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:145)
    at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:79)
    at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:48)
    at java.io.DataOutputStream.write(DataOutputStream.java:88)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:441)
    ... 13 more
    at org.junit.Assert.fail(Assert.java:88)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:327)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.run(TestDFSStripedOutputStreamWithFailure.java:527)
    at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.test1(TestDFSStripedOutputStreamWithFailure.java:531)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)


FAILED: org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage

Error Message:
Cannot obtain block length for LocatedBlock{BP-697651589-67.195.81.147-1463054028093:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:47738,DS-ebd63b8c-9730-4961-b59d-c98de683cd99,DISK]]}

Stack Trace:
java.io.IOException: Cannot obtain block length for LocatedBlock{BP-697651589-67.195.81.147-1463054028093:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:47738,DS-ebd63b8c-9730-4961-b59d-c98de683cd99,DISK]]}
    at org.apache.hadoop.hdfs.DFSInputStream.readBlockLength(DFSInputStream.java:435)
    at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:345)
    at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:277)
    at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:267)
    at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1038)
    at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1003)
    at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.dfsOpenFileWithRetries(TestDFSUpgradeFromImage.java:178)
    at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyDir(TestDFSUpgradeFromImage.java:214)
    at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyFileSystem(TestDFSUpgradeFromImage.java:229)
    at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.upgradeAndVerify(TestDFSUpgradeFromImage.java:606)
    at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage(TestDFSUpgradeFromImage.java:628)


FAILED: org.apache.hadoop.hdfs.TestFileAppend.testMultipleAppends

Error Message:
Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:37498,DS-6e8ef62e-a4af-4a5d-b4c7-4b62e58d5213,DISK], DatanodeInfoWithStorage[127.0.0.1:44780,DS-7aa0fe78-3983-4f6a-9b6f-56c0bac5e6bf,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:37498,DS-6e8ef62e-a4af-4a5d-b4c7-4b62e58d5213,DISK], DatanodeInfoWithStorage[127.0.0.1:44780,DS-7aa0fe78-3983-4f6a-9b6f-56c0bac5e6bf,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration.

Stack Trace:
java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:37498,DS-6e8ef62e-a4af-4a5d-b4c7-4b62e58d5213,DISK], DatanodeInfoWithStorage[127.0.0.1:44780,DS-7aa0fe78-3983-4f6a-9b6f-56c0bac5e6bf,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:37498,DS-6e8ef62e-a4af-4a5d-b4c7-4b62e58d5213,DISK], DatanodeInfoWithStorage[127.0.0.1:44780,DS-7aa0fe78-3983-4f6a-9b6f-56c0bac5e6bf,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration.
    at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1166)
    at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1236)
    at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1427)
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1342)
    at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1325)
    at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:603)
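The error message above names its own knob: with only two live DataNodes in the mini cluster's pipeline, the DEFAULT replace-datanode-on-failure policy finds no spare node to add, so the append fails. A minimal sketch of how a client would relax the policy, as the message suggests; whether NEVER is acceptable depends on the durability requirements of the write:

    import org.apache.hadoop.conf.Configuration;

    public class ReplaceDatanodePolicySketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Accepted values include DEFAULT, ALWAYS and NEVER; NEVER keeps
        // writing on the surviving nodes instead of demanding a replacement.
        conf.set("dfs.client.block.write.replace-datanode-on-failure.policy",
            "NEVER");
      }
    }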

FAILED: org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.TestScrLazyPersistFiles.testScrBlockFileCorruption

Error Message:
Expected: is <DISK>
     but: was <RAM_DISK>

Stack Trace:
java.lang.AssertionError: Expected: is <DISK>
     but: was <RAM_DISK>
    at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
    at org.junit.Assert.assertThat(Assert.java:865)
    at org.junit.Assert.assertThat(Assert.java:832)
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.LazyPersistTestCase.ensureFileReplicasOnStorageType(LazyPersistTestCase.java:141)
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.TestScrLazyPersistFiles.doShortCircuitReadBlockFileCorruptionTest(TestScrLazyPersistFiles.java:227)
    at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.TestScrLazyPersistFiles.testScrBlockFileCorruption(TestScrLazyPersistFiles.java:203)


FAILED: org.apache.hadoop.hdfs.server.namenode.TestNameNodeMetadataConsistency.testGenerationStampInFuture

Error Message:
expected:<17> but was:<0>

Stack Trace:
java.lang.AssertionError: expected:<17> but was:<0>
    at org.junit.Assert.fail(Assert.java:88)
    at org.junit.Assert.failNotEquals(Assert.java:743)
    at org.junit.Assert.assertEquals(Assert.java:118)
    at org.junit.Assert.assertEquals(Assert.java:555)
    at org.junit.Assert.assertEquals(Assert.java:542)
    at org.apache.hadoop.hdfs.server.namenode.TestNameNodeMetadataConsistency.testGenerationStampInFuture(TestNameNodeMetadataConsistency.java:113)


FAILED: org.apache.hadoop.hdfs.shortcircuit.TestShortCircuitCache.testDataXceiverCleansUpSlotsOnFailure

Error Message:
expected:<1> but was:<2>

Stack Trace:
java.lang.AssertionError: expected:<1> but was:<2>
    at org.junit.Assert.fail(Assert.java:88)
    at org.junit.Assert.failNotEquals(Assert.java:743)
    at org.junit.Assert.assertEquals(Assert.java:118)
    at org.junit.Assert.assertEquals(Assert.java:555)
    at org.junit.Assert.assertEquals(Assert.java:542)
    at org.apache.hadoop.hdfs.shortcircuit.TestShortCircuitCache$17.accept(TestShortCircuitCache.java:633)
    at org.apache.hadoop.hdfs.server.datanode.ShortCircuitRegistry.visit(ShortCircuitRegistry.java:403)
    at org.apache.hadoop.hdfs.shortcircuit.TestShortCircuitCache.checkNumberOfSegmentsAndSlots(TestShortCircuitCache.java:628)
    at org.apache.hadoop.hdfs.shortcircuit.TestShortCircuitCache.testDataXceiverCleansUpSlotsOnFailure(TestShortCircuitCache.java:682)


FAILED: org.apache.hadoop.hdfs.web.TestWebHdfsTimeouts.testAuthUrlReadTimeout[timeoutSource=Configuration]

Error Message:
Expected to find 'localhost:40754: Read timed out' but got unexpected exception:
java.net.SocketTimeoutException: localhost:40754: null
    at java.net.SocksSocketImpl.remainingMillis(SocksSocketImpl.java:111)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:579)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:175)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
    at sun.net.www.http.HttpClient.New(HttpClient.java:308)
    at sun.net.www.http.HttpClient.New(HttpClient.java:326)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:700)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:653)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:725)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:571)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:602)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:598)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:1506)
    at org.apache.hadoop.hdfs.web.TestWebHdfsTimeouts.testAuthUrlReadTimeout(TestWebHdfsTimeouts.java:195)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)

Stack Trace:
java.lang.AssertionError: Expected to find 'localhost:40754: Read timed out' but got unexpected exception:
java.net.SocketTimeoutException: localhost:40754: null
    at java.net.SocksSocketImpl.remainingMillis(SocksSocketImpl.java:111)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:579)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:175)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
    at sun.net.www.http.HttpClient.New(HttpClient.java:308)
    at sun.net.www.http.HttpClient.New(HttpClient.java:326)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:700)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:653)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:725)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:571)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:602)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:598)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:1506)
    at org.apache.hadoop.hdfs.web.TestWebHdfsTimeouts.testAuthUrlReadTimeout(TestWebHdfsTimeouts.java:195)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
    at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
    at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
    at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
    at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
    at java.net.SocksSocketImpl.remainingMillis(SocksSocketImpl.java:111)
    at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
    at java.net.Socket.connect(Socket.java:579)
    at sun.net.NetworkClient.doConnect(NetworkClient.java:175)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
    at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
    at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
    at sun.net.www.http.HttpClient.New(HttpClient.java:308)
    at sun.net.www.http.HttpClient.New(HttpClient.java:326)
    at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
    at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
    at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:700)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:653)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:725)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:571)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:602)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:598)
    at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:1506)
    at org.apache.hadoop.hdfs.web.TestWebHdfsTimeouts.testAuthUrlReadTimeout(TestWebHdfsTimeouts.java:195)
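The [timeoutSource=Configuration] parameter indicates this run drives the WebHDFS socket timeouts from configuration rather than from the connection-factory default. Assuming the trunk-era key names for those settings (an assumption; check hdfs-default.xml for the exact keys), a client would shorten them roughly like this, with values in milliseconds and purely illustrative:

    import org.apache.hadoop.conf.Configuration;

    public class WebHdfsTimeoutSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Assumed key names for the configurable WebHDFS socket timeouts.
        conf.set("dfs.webhdfs.socket.connect-timeout", "5000");
        conf.set("dfs.webhdfs.socket.read-timeout", "5000");
      }
    }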

FAILED: org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS

Error Message:
Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":48013;

Stack Trace:
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":48013;
    at sun.security.krb5.KdcComm.send(KdcComm.java:250)
    at sun.security.krb5.KdcComm.send(KdcComm.java:191)
    at sun.security.krb5.KrbTgsReq.send(KrbTgsReq.java:187)
    at sun.security.krb5.KrbTgsReq.sendAndGetCreds(KrbTgsReq.java:202)
    at sun.security.krb5.internal.CredentialsUtil.serviceCreds(CredentialsUtil.java:311)
    at sun.security.krb5.internal.CredentialsUtil.acquireServiceCreds(CredentialsUtil.java:115)
    at sun.security.krb5.Credentials.acquireServiceCreds(Credentials.java:449)
    at sun.security.jgss.krb5.Krb5Context.initSecContext(Krb5Context.java:641)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:248)
    at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
    at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
    at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:411)
    at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:617)
    at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:417)
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:799)
    at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:795)
    at java.security.AccessController.doPrivileged(Native Method)
    at javax.security.auth.Subject.doAs(Subject.java:415)
    at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
    at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:794)
    at org.apache.hadoop.ipc.Client$Connection.access$3200(Client.java:417)
    at org.apache.hadoop.ipc.Client.getConnection(Client.java:1547)
    at org.apache.hadoop.ipc.Client.call(Client.java:1394)
    at org.apache.hadoop.ipc.Client.call(Client.java:1358)
    at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:241)
    at com.sun.proxy.$Proxy25.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:582)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:606)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:257)
    at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
    at com.sun.proxy.$Proxy27.mkdirs(Unknown Source)
    at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2302)
    at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2277)
    at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1119)
    at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1116)
    at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
    at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1116)
    at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1108)
    at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1909)
    at org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.createDirectoriesSecurely(TestRollingFileSystemSinkWithSecureHdfs.java:192)
    at org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS(TestRollingFileSystemSinkWithSecureHdfs.java:146)
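The GSS failure above is Kerberos plumbing rather than the metrics sink itself: the client got no reply from the test's in-process KDC while asking for a service ticket during the SASL handshake (the KrbTgsReq frames). For reference, the standard keytab login a secure HDFS client performs before RPCs like mkdirs; the principal and keytab path here are hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class KeytabLoginSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // Logs in from the keytab; the failing run got one step further and
        // died asking the same KDC for a service ticket.
        UserGroupInformation.loginUserFromKeytab(
            "hdfs@EXAMPLE.COM", "/etc/security/keytabs/hdfs.keytab");
      }
    }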