See https://builds.apache.org/job/Hadoop-Hdfs-trunk/1908/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 6242 lines...]
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.1.2:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.3:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.6:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:2.3.2:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ****** FindBugsMojo execute *******
[INFO] canGenerate is false
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS ................................ FAILURE [  02:21 h]
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  2.227 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:21 h
[INFO] Finished at: 2014-10-21T13:56:49+00:00
[INFO] Final Memory: 63M/893M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.16:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Updating YARN-1879
Updating HDFS-7184
Updating YARN-2701
Updating HDFS-7266
Updating HADOOP-11194
Updating YARN-2582
Updating YARN-2673
Updating YARN-2717
Updating HDFS-7154
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure
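
If you are reproducing this locally, the flags mentioned in the [ERROR] output above can be passed when re-running the failing module. This is only a sketch: it assumes a trunk checkout laid out like the Jenkins workspace path in this log.

    cd hadoop-hdfs-project/hadoop-hdfs
    mvn test -e     # re-run with full stack traces for build errors
    mvn test -X     # re-run with full debug logging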



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyIsHot.testStandbyIsHot

Error Message:
Problem in starting http server. Server handlers failed

Stack Trace:
java.io.IOException: Problem in starting http server. Server handlers failed
        at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:819)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer.start(NameNodeHttpServer.java:142)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.startHttpServer(NameNode.java:704)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:591)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:763)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:747)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1443)
        at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1104)
        at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:975)
        at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:804)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:465)
        at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:424)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyIsHot.testStandbyIsHot(TestStandbyIsHot.java:74)


REGRESSION:  org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyIsHot.testDatanodeRestarts

Error Message:
Problem in starting http server. Server handlers failed

Stack Trace:
java.io.IOException: Problem in starting http server. Server handlers failed
        at org.apache.hadoop.http.HttpServer2.start(HttpServer2.java:819)
        at org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer.start(NameNodeHttpServer.java:142)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.startHttpServer(NameNode.java:704)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:591)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:763)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:747)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1443)
        at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1104)
        at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:975)
        at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:804)
        at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:465)
        at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:424)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyIsHot.testDatanodeRestarts(TestStandbyIsHot.java:148)


FAILED:  org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencing.testQueueingWithAppend

Error Message:
expected:<18> but was:<12>

Stack Trace:
java.lang.AssertionError: expected:<18> but was:<12>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:743)
        at org.junit.Assert.assertEquals(Assert.java:118)
        at org.junit.Assert.assertEquals(Assert.java:555)
        at org.junit.Assert.assertEquals(Assert.java:542)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencing.testQueueingWithAppend(TestDNFencing.java:448)


FAILED:  org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencingWithReplication.testFencingStress

Error Message:
Deferred

Stack Trace:
java.lang.RuntimeException: Deferred
        at org.apache.hadoop.test.MultithreadedTestUtil$TestContext.checkException(MultithreadedTestUtil.java:130)
        at org.apache.hadoop.test.MultithreadedTestUtil$TestContext.stop(MultithreadedTestUtil.java:166)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencingWithReplication.testFencingStress(TestDNFencingWithReplication.java:135)
Caused by: java.io.IOException: Timed out waiting for 2 replicas on path /test-14
        at org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencingWithReplication$ReplicationToggler.waitForReplicas(TestDNFencingWithReplication.java:96)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestDNFencingWithReplication$ReplicationToggler.doAnAction(TestDNFencingWithReplication.java:78)
        at org.apache.hadoop.test.MultithreadedTestUtil$RepeatingTestThread.doWork(MultithreadedTestUtil.java:222)
        at org.apache.hadoop.test.MultithreadedTestUtil$TestingThread.run(MultithreadedTestUtil.java:189)
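
The four failures above can usually be re-run in isolation with Surefire's -Dtest filter. This is a sketch rather than part of the Jenkins job: it assumes a trunk checkout, and the module path is taken from the surefire-reports path earlier in this report.

    cd hadoop-hdfs-project/hadoop-hdfs
    # run only the failed HA test classes listed above
    mvn test -Dtest=TestStandbyIsHot,TestDNFencing,TestDNFencingWithReplication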

