See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/2622/changes>

Changes:

[umamahesh] HDFS-9472. concat() API does not give proper exception messages on

------------------------------------------
[...truncated 7763 lines...]
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:129)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2269)
"pool-46-thread-1"  prio=5 tid=170 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Client (1700875334) connection to /127.0.0.1:48263 from 
jenkins/localh...@example.com" daemon prio=5 tid=230 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.ipc.Client$Connection.waitForWork(Client.java:959)
        at org.apache.hadoop.ipc.Client$Connection.run(Client.java:1004)
"pool-49-thread-1"  prio=5 tid=188 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Client (1700875334) connection to localhost/127.0.0.1:51501 from 
jenkins/localh...@example.com" daemon prio=5 tid=199 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.ipc.Client$Connection.waitForWork(Client.java:959)
        at org.apache.hadoop.ipc.Client$Connection.run(Client.java:1004)
"IPC Server handler 1 on 41254" daemon prio=5 tid=190 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:129)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2269)
"IPC Server idle connection scanner for port 56021" daemon prio=5 tid=32 
timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server Responder" daemon prio=5 tid=48 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.apache.hadoop.ipc.Server$Responder.doRunLoop(Server.java:977)
        at org.apache.hadoop.ipc.Server$Responder.run(Server.java:960)
"Logger channel (from parallel executor) to /127.0.0.1:48263" daemon prio=5 
tid=115 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
        at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
        at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 0 on 48263" daemon prio=5 tid=65 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:129)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2269)
"Timer-1" daemon prio=5 tid=28 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"Timer for 'JournalNode' metrics system" daemon prio=5 tid=21 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"Timer-4" daemon prio=5 tid=43 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"pool-55-thread-1"  prio=5 tid=216 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
Tests run: 2, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 87.388 sec <<< FAILURE! - in org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM
testSecureMode(org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM)  Time elapsed: 30.021 sec  <<< ERROR!
java.lang.Exception: test timed out after 30000 milliseconds
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.MiniDFSCluster.waitClusterUp(MiniDFSCluster.java:1339)
        at org.apache.hadoop.hdfs.MiniDFSCluster.restartNameNode(MiniDFSCluster.java:2021)
        at org.apache.hadoop.hdfs.MiniDFSCluster.restartNameNode(MiniDFSCluster.java:1982)
        at org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM.restartNameNode(TestSecureNNWithQJM.java:203)
        at org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM.doNNWithQJMTest(TestSecureNNWithQJM.java:185)
        at org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM.testSecureMode(TestSecureNNWithQJM.java:165)
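Editor's note: the "test timed out after 30000 milliseconds" error above, with the test thread parked in Thread.sleep inside MiniDFSCluster.waitClusterUp, is the signature of a JUnit 4 per-test timeout. As a minimal sketch only (hypothetical class and method names, not the actual TestSecureNNWithQJM source), a test declared like this is failed by JUnit once it runs past 30 seconds, which is why the NameNode restart never gets a chance to complete:

    import org.junit.Test;

    public class TimeoutSketch {
      // Hypothetical test: JUnit 4 aborts it and reports
      // "test timed out after 30000 milliseconds" if it exceeds 30 s.
      @Test(timeout = 30000)
      public void slowClusterRestartScenario() throws Exception {
        // Stand-in for a slow MiniDFSCluster restart: the wait loop is
        // still sleeping when the 30 000 ms budget expires, so the test
        // errors out instead of finishing.
        Thread.sleep(31000);
      }
    }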

"IPC Server handler 3 on 48263" daemon pr
Running org.apache.hadoop.hdfs.qjournal.server.TestJournalNode
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.519 sec - in org.apache.hadoop.hdfs.qjournal.server.TestJournalNode
Running org.apache.hadoop.hdfs.qjournal.server.TestJournal
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.148 sec - in org.apache.hadoop.hdfs.qjournal.server.TestJournal
Running org.apache.hadoop.hdfs.qjournal.server.TestJournalNodeMXBean
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.64 sec - in org.apache.hadoop.hdfs.qjournal.server.TestJournalNodeMXBean
Running org.apache.hadoop.hdfs.qjournal.client.TestQuorumJournalManagerUnit
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.725 sec - in org.apache.hadoop.hdfs.qjournal.client.TestQuorumJournalManagerUnit
Running org.apache.hadoop.hdfs.qjournal.client.TestQuorumJournalManager
Tests run: 21, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 68.458 sec - in org.apache.hadoop.hdfs.qjournal.client.TestQuorumJournalManager
Running org.apache.hadoop.hdfs.qjournal.client.TestSegmentRecoveryComparator
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.776 sec - in org.apache.hadoop.hdfs.qjournal.client.TestSegmentRecoveryComparator
Running org.apache.hadoop.hdfs.qjournal.client.TestIPCLoggerChannel
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.182 sec - in org.apache.hadoop.hdfs.qjournal.client.TestIPCLoggerChannel
Running org.apache.hadoop.hdfs.qjournal.client.TestEpochsAreUnique
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.459 sec - in org.apache.hadoop.hdfs.qjournal.client.TestEpochsAreUnique
Running org.apache.hadoop.hdfs.qjournal.client.TestQJMWithFaults
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 375.512 sec - in org.apache.hadoop.hdfs.qjournal.client.TestQJMWithFaults
Running org.apache.hadoop.hdfs.qjournal.client.TestQuorumCall
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.839 sec - in org.apache.hadoop.hdfs.qjournal.client.TestQuorumCall
Running org.apache.hadoop.hdfs.qjournal.TestMiniJournalCluster
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.196 sec - in org.apache.hadoop.hdfs.qjournal.TestMiniJournalCluster
Running org.apache.hadoop.hdfs.qjournal.TestNNWithQJM
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.469 sec - in org.apache.hadoop.hdfs.qjournal.TestNNWithQJM
Running org.apache.hadoop.hdfs.TestConnCache
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.536 sec - in org.apache.hadoop.hdfs.TestConnCache
Running org.apache.hadoop.hdfs.TestDFSStorageStateRecovery
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 168.955 sec - in org.apache.hadoop.hdfs.TestDFSStorageStateRecovery
Running org.apache.hadoop.hdfs.TestDFSInputStream
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 29.837 sec - in org.apache.hadoop.hdfs.TestDFSInputStream
Running org.apache.hadoop.hdfs.TestFileAppend
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 54.028 sec - in org.apache.hadoop.hdfs.TestFileAppend
Running org.apache.hadoop.hdfs.TestFileAppend3
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 103.482 sec - in org.apache.hadoop.hdfs.TestFileAppend3
Running org.apache.hadoop.hdfs.TestClientReportBadBlock
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.766 sec - in org.apache.hadoop.hdfs.TestClientReportBadBlock
Running org.apache.hadoop.hdfs.TestReadStripedFileWithDecoding

Results :

Failed tests: 
  TestLeaseRecovery2.tearDown:105 Test resulted in an unexpected exit
  TestBlockReplacement.testDeletedBlockWhenAddBlockIsInEdit:435 The block should be only on 1 datanode  expected:<1> but was:<2>

Tests in error: 
  TestLeaseRecovery2.testHardLeaseRecoveryAfterNameNodeRestart2:427->hardLeaseRecoveryRestartHelper:494 » Exit
  TestLeaseRecovery2.testHardLeaseRecoveryWithRenameAfterNameNodeRestart:433->hardLeaseRecoveryRestartHelper:446 » EOF
  TestLeaseRecovery2.testLeaseRecoverByAnotherUser:159 » IllegalState Lease moni...
  TestLeaseRecovery2.testHardLeaseRecovery:276 » Connect Call From asf909.gq1.yg...
  TestDFSClientRetries.testIdempotentAllocateBlockAndClose » NotReplicatedYet No...
  TestDirectoryScanner.testThrottling:580->createFile:104->Object.wait:503->Object.wait:-2 »
  TestSecureNNWithQJM.testSecureMode:165->doNNWithQJMTest:185->restartNameNode:203 »

Tests run: 3202, Failures: 2, Errors: 7, Skipped: 8

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [08:18 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  06:13 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.226 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 06:21 h
[INFO] Finished at: 2015-12-11T18:44:05+00:00
[INFO] Final Memory: 72M/652M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: ExecutionException: java.lang.RuntimeException: java.lang.RuntimeException: java.io.IOException: Stream Closed -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
