See https://builds.apache.org/job/Hadoop-Hdfs-trunk/753/
###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 1402551 lines...]
[junit] 2011-08-16 04:27:18,730 INFO datanode.FSDatasetAsyncDiskService (FSDatasetAsyncDiskService.java:shutdown(142)) - All async disk service threads have been shut down.
[junit] 2011-08-16 04:27:18,731 INFO mortbay.log (Slf4jLog.java:info(67)) - Stopped SelectChannelConnector@localhost:0
[junit] 2011-08-16 04:27:18,832 WARN blockmanagement.BlockManager (BlockManager.java:run(2614)) - ReplicationMonitor thread received InterruptedException.
[junit] java.lang.InterruptedException: sleep interrupted
[junit]     at java.lang.Thread.sleep(Native Method)
[junit]     at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor.run(BlockManager.java:2612)
[junit]     at java.lang.Thread.run(Thread.java:662)
[junit] 2011-08-16 04:27:18,832 WARN blockmanagement.DecommissionManager (DecommissionManager.java:run(75)) - Monitor interrupted: java.lang.InterruptedException: sleep interrupted
[junit] 2011-08-16 04:27:18,832 INFO namenode.FSEditLog (FSEditLog.java:endCurrentLogSegment(859)) - Ending log segment 1
[junit] 2011-08-16 04:27:18,843 INFO namenode.FSEditLog (FSEditLog.java:printStatistics(492)) - Number of transactions: 8 Total time for transactions(ms): 0 Number of transactions batched in Syncs: 0 Number of syncs: 7 SyncTimes(ms): 56 48
[junit] 2011-08-16 04:27:18,844 INFO ipc.Server (Server.java:stop(1715)) - Stopping server on 38584
[junit] 2011-08-16 04:27:18,844 INFO ipc.Server (Server.java:run(1539)) - IPC Server handler 0 on 38584: exiting
[junit] 2011-08-16 04:27:18,844 INFO ipc.Server (Server.java:run(505)) - Stopping IPC Server listener on 38584
[junit] 2011-08-16 04:27:18,845 INFO ipc.Server (Server.java:run(647)) - Stopping IPC Server Responder
[junit] 2011-08-16 04:27:18,845 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stop(199)) - Stopping DataNode metrics system...
[junit] 2011-08-16 04:27:18,845 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics
[junit] 2011-08-16 04:27:18,845 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source NameNodeActivity
[junit] 2011-08-16 04:27:18,845 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort38584
[junit] 2011-08-16 04:27:18,846 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort38584
[junit] 2011-08-16 04:27:18,846 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source FSNamesystem
[junit] 2011-08-16 04:27:18,846 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort41691
[junit] 2011-08-16 04:27:18,846 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort41691
[junit] 2011-08-16 04:27:18,846 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-1
[junit] 2011-08-16 04:27:18,847 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-43076
[junit] 2011-08-16 04:27:18,847 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort59622
[junit] 2011-08-16 04:27:18,847 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort59622
[junit] 2011-08-16 04:27:18,847 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-2
[junit] 2011-08-16 04:27:18,848 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-35471
[junit] 2011-08-16 04:27:18,848 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort34995
[junit] 2011-08-16 04:27:18,848 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort34995
[junit] 2011-08-16 04:27:18,848 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-3
[junit] 2011-08-16 04:27:18,848 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-33376
[junit] 2011-08-16 04:27:18,849 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcActivityForPort49871
[junit] 2011-08-16 04:27:18,849 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source RpcDetailedActivityForPort49871
[junit] 2011-08-16 04:27:18,849 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source JvmMetrics-4
[junit] 2011-08-16 04:27:18,849 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stopSources(408)) - Stopping metrics source DataNodeActivity-asf011.sp2.ygridcore.net-47339
[junit] 2011-08-16 04:27:18,849 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:stop(205)) - DataNode metrics system stopped.
[junit] 2011-08-16 04:27:18,850 INFO impl.MetricsSystemImpl (MetricsSystemImpl.java:shutdown(553)) - DataNode metrics system shutdown complete.
[junit] Tests run: 16, Failures: 0, Errors: 0, Time elapsed: 102.733 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build.xml:777: Tests failed!

Total time: 78 minutes 11 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Archiving artifacts
Recording fingerprints
Recording test results
Publishing Javadoc
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure
###################################################################################
############################## FAILED TESTS (if any) ##############################
7 tests failed.

REGRESSION: org.apache.hadoop.hdfs.TestDFSRollback.testRollback

Error Message:
File contents differed:
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data2/current/VERSION=d0342cd292d1c7d919b5f4b000f940de
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data1/current/VERSION=5baa37e94882a5fbc28d9d74bfc04db0

Stack Trace:
junit.framework.AssertionFailedError: File contents differed:
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data2/current/VERSION=d0342cd292d1c7d919b5f4b000f940de
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data1/current/VERSION=5baa37e94882a5fbc28d9d74bfc04db0
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data2/current/VERSION=d0342cd292d1c7d919b5f4b000f940de
  /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/trunk/build/test/data/dfs/data1/current/VERSION=5baa37e94882a5fbc28d9d74bfc04db0
    at org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil.assertFileContentsSame(FSImageTestUtil.java:251)
    at org.apache.hadoop.hdfs.server.namenode.FSImageTestUtil.assertParallelFilesAreIdentical(FSImageTestUtil.java:237)
    at org.apache.hadoop.hdfs.TestDFSRollback.checkResult(TestDFSRollback.java:86)
    at org.apache.hadoop.hdfs.TestDFSRollback.__CLR2_4_37oj5yb1gcn(TestDFSRollback.java:171)
    at org.apache.hadoop.hdfs.TestDFSRollback.testRollback(TestDFSRollback.java:134)

REGRESSION: org.apache.hadoop.hdfs.TestParallelRead.testParallelRead

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

FAILED: org.apache.hadoop.hdfs.TestHDFSServerPorts.testSecondaryNodePorts

Error Message:
Directory /test/dfs/namesecondary is in an inconsistent state: checkpoint directory does not exist or is not accessible.

Stack Trace:
org.apache.hadoop.hdfs.server.common.InconsistentFSStateException: Directory /test/dfs/namesecondary is in an inconsistent state: checkpoint directory does not exist or is not accessible.
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.recoverCreate(SecondaryNameNode.java:801)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.initialize(SecondaryNameNode.java:222)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.<init>(SecondaryNameNode.java:175)
    at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.<init>(SecondaryNameNode.java:168)
    at org.apache.hadoop.hdfs.TestHDFSServerPorts.canStartSecondaryNode(TestHDFSServerPorts.java:224)
    at org.apache.hadoop.hdfs.TestHDFSServerPorts.__CLR2_4_3vpy47p1541(TestHDFSServerPorts.java:350)
    at org.apache.hadoop.hdfs.TestHDFSServerPorts.testSecondaryNodePorts(TestHDFSServerPorts.java:339)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSeparateEditsDirLocking

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:626)
    at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:541)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:257)
    at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:85)
    at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:243)
    at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.__CLR2_4_3harbaz1hcb(TestCheckpoint.java:560)
    at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSeparateEditsDirLocking(TestCheckpoint.java:553)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestNNThroughputBenchmark.testNNThroughput

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestNNThroughputBenchmark.__CLR2_4_3b2i9ur1f75(TestNNThroughputBenchmark.java:39)
    at org.apache.hadoop.hdfs.server.namenode.TestNNThroughputBenchmark.testNNThroughput(TestNNThroughputBenchmark.java:35)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatMatchingRPCandHttpPortsThrowException

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.__CLR2_4_3b49o261bi8(TestValidateConfigurationSettings.java:49)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatMatchingRPCandHttpPortsThrowException(TestValidateConfigurationSettings.java:43)

FAILED: org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatDifferentRPCandHttpPortsAreOK

Error Message:
Cannot create directory /test/dfs/name/current

Stack Trace:
java.io.IOException: Cannot create directory /test/dfs/name/current
    at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.clearDirectory(Storage.java:276)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:492)
    at org.apache.hadoop.hdfs.server.namenode.NNStorage.format(NNStorage.java:512)
    at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:169)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1367)
    at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:242)
    at org.apache.hadoop.hdfs.DFSTestUtil.formatNameNode(DFSTestUtil.java:113)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.__CLR2_4_3ihms9r1bii(TestValidateConfigurationSettings.java:71)
    at org.apache.hadoop.hdfs.server.namenode.TestValidateConfigurationSettings.testThatDifferentRPCandHttpPortsAreOK(TestValidateConfigurationSettings.java:66)
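For anyone re-checking the TestDFSRollback mismatch outside the build, the sketch below is a minimal stand-alone comparison, not taken from FSImageTestUtil or the Hadoop tree; the class name VersionFileDiff and the command-line arguments are placeholders, and it assumes the hex string printed after each VERSION path above is an MD5 digest of the file contents.

import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.MessageDigest;

/**
 * Hypothetical helper for re-checking the two VERSION files named in the
 * TestDFSRollback failure; assumes the reported hash is an MD5 digest of
 * the file contents.
 */
public class VersionFileDiff {

  /** Returns the MD5 digest of a file as a lowercase hex string. */
  static String md5Of(Path file) throws Exception {
    MessageDigest md = MessageDigest.getInstance("MD5");
    byte[] digest = md.digest(Files.readAllBytes(file));
    StringBuilder hex = new StringBuilder();
    for (byte b : digest) {
      hex.append(String.format("%02x", b));
    }
    return hex.toString();
  }

  public static void main(String[] args) throws Exception {
    // args[0] and args[1] are placeholders: point them at the
    // data1/current/VERSION and data2/current/VERSION files left behind
    // under build/test/data/dfs after the failed run.
    Path first = Paths.get(args[0]);
    Path second = Paths.get(args[1]);
    String firstDigest = md5Of(first);
    String secondDigest = md5Of(second);
    System.out.println(first + "=" + firstDigest);
    System.out.println(second + "=" + secondDigest);
    if (!firstDigest.equals(secondDigest)) {
      System.out.println("File contents differed");
    }
  }
}

Pointed at the two VERSION files from the failed workspace, it prints the same path=digest pairs as the assertion message and reports whether they differ.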