See https://builds.apache.org/hudson/job/Hadoop-Hdfs-22-branch/39/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3323 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.155 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.208 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 15.591 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 42.669 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.356 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 211.012 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 362.137 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 35.72 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 58 minutes 45 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testCheckpoint

Error Message:
port out of range:-1

Stack Trace:
java.lang.IllegalArgumentException: port out of range:-1
        at java.net.InetSocketAddress.<init>(InetSocketAddress.java:118)
        at org.apache.hadoop.hdfs.server.namenode.NameNode$1.run(NameNode.java:520)
        at org.apache.hadoop.hdfs.server.namenode.NameNode$1.run(NameNode.java:460)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:396)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1142)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.startHttpServer(NameNode.java:460)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.activate(NameNode.java:404)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:388)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:577)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:570)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1533)
        at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:445)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
        at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
        at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.__CLR3_0_2xuql33n8b(TestCheckpoint.java:648)
        at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testCheckpoint(TestCheckpoint.java:640)
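
Note on the failure above: "port out of range:-1" is the standard argument check in java.net.InetSocketAddress, which rejects any port outside 0-65535; here it was hit while NameNode.startHttpServer was building the HTTP server address. A minimal, self-contained reproduction of just that exception (illustrative only, not the NameNode code):

    import java.net.InetSocketAddress;

    public class PortRangeDemo {
        public static void main(String[] args) {
            try {
                // Ports must be in 0-65535; -1 trips the same check seen in the
                // stack trace above.
                new InetSocketAddress("127.0.0.1", -1);
            } catch (IllegalArgumentException e) {
                // Prints "port out of range:-1", matching the error message above.
                System.out.println(e.getMessage());
            }
        }
    }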


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSaveNamespace

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:618)
        at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.analyzeStorage(Storage.java:472)
        at org.apache.hadoop.hdfs.server.namenode.FSImage.recoverTransitionRead(FSImage.java:406)
        at org.apache.hadoop.hdfs.server.namenode.FSDirectory.loadFSImage(FSDirectory.java:149)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.initialize(FSNamesystem.java:308)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.<init>(FSNamesystem.java:286)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.loadNamesystem(NameNode.java:328)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:356)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:577)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:570)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1533)
        at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:445)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
        at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
        at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.__CLR3_0_2v7t0abn9p(TestCheckpoint.java:743)
        at org.apache.hadoop.hdfs.server.namenode.TestCheckpoint.testSaveNamespace(TestCheckpoint.java:737)
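
Note on the failure above: "The directory is already locked" means the name1 storage directory's lock was still held when this test tried to start its MiniDFSCluster, typically because a NameNode from an earlier test case (such as the testCheckpoint failure above) was not shut down. HDFS guards each storage directory with a file lock; the sketch below shows the same contention using plain java.nio locking and a hypothetical lock file, not the actual Storage code:

    import java.io.File;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    public class StorageLockDemo {
        public static void main(String[] args) throws Exception {
            // Hypothetical lock file standing in for a storage directory's lock.
            File lockFile = new File(System.getProperty("java.io.tmpdir"), "name1.lock");

            RandomAccessFile first = new RandomAccessFile(lockFile, "rws");
            FileLock held = first.getChannel().tryLock();      // first locker succeeds
            System.out.println("first lock acquired: " + (held != null));

            RandomAccessFile second = new RandomAccessFile(lockFile, "rws");
            try {
                // A different process would get null here; within the same JVM the
                // overlap surfaces as an exception. Either way the directory is
                // effectively "already locked".
                FileLock again = second.getChannel().tryLock();
                System.out.println("second lock acquired: " + (again != null));
            } catch (OverlappingFileLockException alreadyLocked) {
                System.out.println("already locked: " + alreadyLocked);
            }
        }
    }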


FAILED:  org.apache.hadoop.cli.TestHDFSCLI.testAll

Error Message:
One of the tests failed. See the Detailed results to identify the command that 
failed

Stack Trace:
junit.framework.AssertionFailedError: One of the tests failed. See the Detailed results to identify the command that failed
        at org.apache.hadoop.cli.CLITestHelper.displayResults(CLITestHelper.java:257)
        at org.apache.hadoop.cli.CLITestHelper.tearDown(CLITestHelper.java:119)
        at org.apache.hadoop.cli.TestHDFSCLI.tearDown(TestHDFSCLI.java:81)


FAILED:  org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0

Error Message:
127.0.0.1:55743is not an underUtilized node

Stack Trace:
junit.framework.AssertionFailedError: 127.0.0.1:55743is not an underUtilized node
        at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:1011)
        at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:953)
        at org.apache.hadoop.hdfs.server.balancer.Balancer.run(Balancer.java:1496)
        at org.apache.hadoop.hdfs.server.balancer.TestBalancer.runBalancer(TestBalancer.java:247)
        at org.apache.hadoop.hdfs.server.balancer.TestBalancer.test(TestBalancer.java:234)
        at org.apache.hadoop.hdfs.server.balancer.TestBalancer.oneNodeTest(TestBalancer.java:307)
        at org.apache.hadoop.hdfs.server.balancer.TestBalancer.__CLR3_0_29j3j5bp1f(TestBalancer.java:327)
        at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0(TestBalancer.java:324)
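
Note on the failure above: the assertion is raised in the Balancer's node-classification step (Balancer.initNodes), which buckets each datanode relative to the cluster-average utilization; here a datanode that the run expected to land in the under-utilized bucket did not. A rough sketch of that kind of classification check, with assumed numbers and a simplified rule rather than the actual Balancer code:

    public class UtilizationBucketsDemo {
        // Illustrative rule: a node counts as under-utilized when its utilization
        // sits more than `threshold` percentage points below the cluster average.
        static boolean isUnderUtilized(double nodeUtil, double avgUtil, double threshold) {
            return nodeUtil < avgUtil - threshold;
        }

        public static void main(String[] args) {
            double avgUtil = 25.0;    // assumed cluster-average usage, in percent
            double threshold = 10.0;  // assumed balancer threshold, in percent
            System.out.println(isUnderUtilized(5.0, avgUtil, threshold));   // true
            System.out.println(isUnderUtilized(20.0, avgUtil, threshold));  // false: such a node
                                                                            // would fail a check like the one above
        }
    }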


