See https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/1143/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 6021 lines...]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [03:58 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  03:39 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.081 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:43 h
[INFO] Finished at: 2016-04-27T05:21:05+00:00
[INFO] Final Memory: 56M/545M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
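
The [ERROR] block above is Maven's standard failure guidance. For reference, a minimal sketch of resuming the build locally from the failed module, assuming a trunk checkout; "test" is an assumed stand-in for the original goals, which the log elides as <goals>:

    # Resume the reactor from the hadoop-hdfs module, per the [ERROR] hint above.
    # (Assumption: "test" substitutes for the elided <goals>.)
    mvn test -rf :hadoop-hdfs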



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage

Error Message:
Cannot obtain block length for LocatedBlock{BP-1527029816-67.195.81.150-1461733827486:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:41480,DS-7ef40010-4d39-4575-a445-347e4c6e5ca4,DISK]]}

Stack Trace:
java.io.IOException: Cannot obtain block length for LocatedBlock{BP-1527029816-67.195.81.150-1461733827486:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:41480,DS-7ef40010-4d39-4575-a445-347e4c6e5ca4,DISK]]}
        at org.apache.hadoop.hdfs.DFSInputStream.readBlockLength(DFSInputStream.java:435)
        at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:345)
        at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:278)
        at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:267)
        at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1038)
        at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1003)
        at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.dfsOpenFileWithRetries(TestDFSUpgradeFromImage.java:178)
        at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyDir(TestDFSUpgradeFromImage.java:214)
        at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyFileSystem(TestDFSUpgradeFromImage.java:229)
        at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.upgradeAndVerify(TestDFSUpgradeFromImage.java:606)
        at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage(TestDFSUpgradeFromImage.java:628)


FAILED:  org.apache.hadoop.hdfs.TestFileCorruption.testArrayOutOfBoundsException

Error Message:
org/apache/hadoop/util/IntrusiveCollection$IntrusiveIterator

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/util/IntrusiveCollection$IntrusiveIterator
        at org.apache.hadoop.util.IntrusiveCollection.iterator(IntrusiveCollection.java:213)
        at org.apache.hadoop.util.IntrusiveCollection.clear(IntrusiveCollection.java:368)
        at org.apache.hadoop.hdfs.server.blockmanagement.DatanodeManager.clearPendingCachingCommands(DatanodeManager.java:1721)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.stopActiveServices(FSNamesystem.java:1243)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.close(FSNamesystem.java:1588)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.stopCommonServices(NameNode.java:814)
        at org.apache.hadoop.hdfs.server.namenode.NameNode.stop(NameNode.java:993)
        at org.apache.hadoop.hdfs.MiniDFSCluster.stopAndJoinNameNode(MiniDFSCluster.java:1965)
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1911)
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
        at org.apache.hadoop.hdfs.TestFileCorruption.testArrayOutOfBoundsException(TestFileCorruption.java:166)


FAILED:  org.apache.hadoop.hdfs.TestFileCorruption.testFileCorruption

Error Message:
Test resulted in an unexpected exit

Stack Trace:
java.lang.AssertionError: Test resulted in an unexpected exit
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1895)
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
        at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
        at org.apache.hadoop.hdfs.TestFileCorruption.testFileCorruption(TestFileCorruption.java:90)


FAILED:  org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyCheckpoints.testReadsAllowedDuringCheckpoint

Error Message:
expected:<200> but was:<500>

Stack Trace:
java.lang.AssertionError: expected:<200> but was:<500>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:743)
        at org.junit.Assert.assertEquals(Assert.java:118)
        at org.junit.Assert.assertEquals(Assert.java:555)
        at org.junit.Assert.assertEquals(Assert.java:542)
        at org.apache.hadoop.hdfs.DFSTestUtil.urlGetBytes(DFSTestUtil.java:882)
        at org.apache.hadoop.hdfs.DFSTestUtil.urlGet(DFSTestUtil.java:872)
        at org.apache.hadoop.hdfs.server.namenode.ha.TestStandbyCheckpoints.testReadsAllowedDuringCheckpoint(TestStandbyCheckpoints.java:449)
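
The four failing classes above can be re-run in isolation before investigating. A minimal sketch, assuming a trunk checkout and Surefire's standard -Dtest class filter (the class list is taken from the FAILED lines above; local results may differ from the Jenkins slave):

    # Run only the hadoop-hdfs module and only the failing test classes.
    mvn test -pl hadoop-hdfs-project/hadoop-hdfs \
        -Dtest=TestDFSUpgradeFromImage,TestFileCorruption,TestStandbyCheckpoints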

