See https://builds.apache.org/job/Hadoop-Hdfs-trunk/2294/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 7463 lines...]
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [03:16 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  02:43 h]
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.059 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 02:46 h
[INFO] Finished at: 2015-09-11T01:53:26+00:00
[INFO] Final Memory: 58M/668M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Sending artifact delta relative to Hadoop-Hdfs-trunk #2288
Archived 1 artifacts
Archive block size is 32768
Received 0 blocks and 4568655 bytes
Compression is 0.0%
Took 3.5 sec
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure.testReplaceDatanodeOnFailure

Error Message:
expected:<3> but was:<2>

Stack Trace:
java.lang.AssertionError: expected:<3> but was:<2>
        at org.junit.Assert.fail(Assert.java:88)
        at org.junit.Assert.failNotEquals(Assert.java:743)
        at org.junit.Assert.assertEquals(Assert.java:118)
        at org.junit.Assert.assertEquals(Assert.java:555)
        at org.junit.Assert.assertEquals(Assert.java:542)
        at org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure$SlowWriter.checkReplication(TestReplaceDatanodeOnFailure.java:235)
        at org.apache.hadoop.hdfs.TestReplaceDatanodeOnFailure.testReplaceDatanodeOnFailure(TestReplaceDatanodeOnFailure.java:154)
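
The assertion above comes from the test's replication check: the test kills a datanode mid-write and expects the write pipeline to replace it, restoring every block to 3 live replicas, but one block still reported only 2. Below is a minimal sketch of that kind of check against a running DFS cluster; the class, method, and parameter names are illustrative, not the test's actual code:

    import java.io.IOException;
    import org.apache.hadoop.fs.BlockLocation;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import static org.junit.Assert.assertEquals;

    public class ReplicationCheckSketch {
      // Assert that every block of `path` currently has `expected` live replicas.
      static void checkFullyReplicated(FileSystem fs, Path path, int expected)
          throws IOException {
        FileStatus status = fs.getFileStatus(path);
        for (BlockLocation loc
            : fs.getFileBlockLocations(status, 0, status.getLen())) {
          // After pipeline recovery each block should be back to `expected`
          // replicas; a lagging replacement surfaces as expected:<3> but was:<2>.
          assertEquals(expected, loc.getHosts().length);
        }
      }
    }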


REGRESSION:  org.apache.hadoop.hdfs.web.TestWebHDFS.testLargeDirectory

Error Message:
Read timed out

Stack Trace:
java.net.SocketTimeoutException: Read timed out
        at java.net.SocketInputStream.socketRead0(Native Method)
        at java.net.SocketInputStream.read(SocketInputStream.java:152)
        at java.net.SocketInputStream.read(SocketInputStream.java:122)
        at java.io.BufferedInputStream.fill(BufferedInputStream.java:235)
        at java.io.BufferedInputStream.read1(BufferedInputStream.java:275)
        at java.io.BufferedInputStream.read(BufferedInputStream.java:334)
        at sun.net.www.http.HttpClient.parseHTTPHeader(HttpClient.java:687)
        at sun.net.www.http.HttpClient.parseHTTP(HttpClient.java:633)
        at sun.net.www.protocol.http.HttpURLConnection.getInputStream(HttpURLConnection.java:1323)
        at java.net.HttpURLConnection.getResponseCode(HttpURLConnection.java:468)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.validateResponse(WebHdfsFileSystem.java:363)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.access$200(WebHdfsFileSystem.java:91)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:642)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:492)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:521)
        at java.security.AccessController.doPrivileged(Native Method)
        at javax.security.auth.Subject.doAs(Subject.java:415)
        at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1667)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:517)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.setPermission(WebHdfsFileSystem.java:1052)
        at org.apache.hadoop.hdfs.web.TestWebHDFS.testLargeDirectory(TestWebHDFS.java:258)
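
The SocketTimeoutException above is raised by the plain java.net HTTP client underneath WebHdfsFileSystem: setPermission on a directory with many children is a single HTTP call to the NameNode, and if the response header does not arrive within the connection's read timeout, getResponseCode() fails with "Read timed out". A minimal sketch of that mechanism follows; the URL, port, and 60-second timeouts are illustrative, not the values WebHDFS actually configures:

    import java.net.HttpURLConnection;
    import java.net.URL;

    public class ReadTimeoutSketch {
      public static void main(String[] args) throws Exception {
        URL url = new URL("http://namenode:50070/webhdfs/v1/tmp?op=GETFILESTATUS");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setConnectTimeout(60000); // max time to establish the TCP connection
        conn.setReadTimeout(60000);    // max time to wait on each read; exceeding
                                       // it throws java.net.SocketTimeoutException
        // Blocks until the response header is parsed -- this is the
        // HttpClient.parseHTTPHeader frame in the stack trace above.
        System.out.println(conn.getResponseCode());
      }
    }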


REGRESSION:  org.apache.hadoop.hdfs.web.TestWebHDFSOAuth2.listStatusReturnsAsExpected

Error Message:
Unable to load OAuth2 connection factory.

Stack Trace:
java.io.IOException: Unable to load OAuth2 connection factory.
        at java.io.FileInputStream.open(Native Method)
        at java.io.FileInputStream.<init>(FileInputStream.java:146)
        at org.apache.hadoop.security.ssl.ReloadingX509TrustManager.loadTrustManager(ReloadingX509TrustManager.java:164)
        at org.apache.hadoop.security.ssl.ReloadingX509TrustManager.<init>(ReloadingX509TrustManager.java:81)
        at org.apache.hadoop.security.ssl.FileBasedKeyStoresFactory.init(FileBasedKeyStoresFactory.java:215)
        at org.apache.hadoop.security.ssl.SSLFactory.init(SSLFactory.java:131)
        at org.apache.hadoop.hdfs.web.URLConnectionFactory.newSslConnConfigurator(URLConnectionFactory.java:135)
        at org.apache.hadoop.hdfs.web.URLConnectionFactory.newOAuth2URLConnectionFactory(URLConnectionFactory.java:110)
        at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.initialize(WebHdfsFileSystem.java:158)
        at org.apache.hadoop.hdfs.web.TestWebHDFSOAuth2.listStatusReturnsAsExpected(TestWebHDFSOAuth2.java:147)
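
Despite the OAuth2 wording, the stack trace shows the failure is in SSL setup: building the OAuth2 connection factory creates an SSLFactory, whose ReloadingX509TrustManager opens the configured trust-store file with a FileInputStream, and that open fails (most likely a trust-store path in the test's Configuration that does not exist on the build slave; that diagnosis is an inference from the trace, not confirmed here). A minimal sketch of the failing load, with a placeholder path and password:

    import java.io.FileInputStream;
    import java.security.KeyStore;

    public class TrustStoreLoadSketch {
      public static void main(String[] args) throws Exception {
        KeyStore ks = KeyStore.getInstance(KeyStore.getDefaultType());
        // If the trust-store file is missing, this constructor throws
        // FileNotFoundException -- the FileInputStream.open frame above --
        // before load() is ever reached.
        try (FileInputStream in = new FileInputStream("/path/to/truststore.jks")) {
          ks.load(in, "changeit".toCharArray());
        }
      }
    }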

