hadoop-hdfs-dev mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Hadoop-Hdfs-trunk-Java8 - Build # 719 - Still Failing
Date: Mon, 21 Dec 2015 23:23:34 GMT
See https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/719/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 7704 lines...]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/target
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [06:38 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:41 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.198 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:47 h
[INFO] Finished at: 2015-12-21T23:23:16+00:00
[INFO] Final Memory: 72M/1236M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: ExecutionException: java.lang.RuntimeException: java.lang.RuntimeException: java.io.IOException: Stream Closed -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage

Error Message:
Cannot obtain block length for LocatedBlock{BP-2078839909-67.195.81.148-1450737639000:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:35067,DS-5fc29a41-8d89-4a00-aaed-04073251324d,DISK]]}

Stack Trace:
java.io.IOException: Cannot obtain block length for LocatedBlock{BP-2078839909-67.195.81.148-1450737639000:blk_7162739548153522810_1020; getBlockSize()=1024; corrupt=false; offset=0; locs=[DatanodeInfoWithStorage[127.0.0.1:35067,DS-5fc29a41-8d89-4a00-aaed-04073251324d,DISK]]}
	at org.apache.hadoop.hdfs.DFSInputStream.readBlockLength(DFSInputStream.java:399)
	at org.apache.hadoop.hdfs.DFSInputStream.fetchLocatedBlocksAndGetLastBlockLength(DFSInputStream.java:343)
	at org.apache.hadoop.hdfs.DFSInputStream.openInfo(DFSInputStream.java:276)
	at org.apache.hadoop.hdfs.DFSInputStream.<init>(DFSInputStream.java:265)
	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1046)
	at org.apache.hadoop.hdfs.DFSClient.open(DFSClient.java:1011)
	at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.dfsOpenFileWithRetries(TestDFSUpgradeFromImage.java:177)
	at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyDir(TestDFSUpgradeFromImage.java:213)
	at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.verifyFileSystem(TestDFSUpgradeFromImage.java:228)
	at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.upgradeAndVerify(TestDFSUpgradeFromImage.java:600)
	at org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage(TestDFSUpgradeFromImage.java:622)

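This "Cannot obtain block length" error is characteristic of opening a file whose last block was still being written (the block-being-written, BBW, case this upgrade test exercises): the client cannot learn the block's final length until block recovery completes. The test tolerates transient cases by retrying the open, per dfsOpenFileWithRetries in the stack trace. A minimal sketch of that retry pattern, assuming a bounded retry count and a fixed back-off (both illustrative, not the actual test code):

    import java.io.IOException;
    import java.io.InputStream;
    import org.apache.hadoop.hdfs.DFSClient;

    public class OpenWithRetries {
      // Retry DFSClient.open() while the "Cannot obtain block length"
      // condition persists; any other IOException is rethrown at once.
      static InputStream openWithRetries(DFSClient client, String path)
          throws IOException, InterruptedException {
        final int maxRetries = 10;   // assumed bound, not from the test
        for (int attempt = 0; ; attempt++) {
          try {
            return client.open(path);
          } catch (IOException e) {
            if (attempt >= maxRetries || e.getMessage() == null
                || !e.getMessage().contains("Cannot obtain block length")) {
              throw e;               // unrelated failure, or out of retries
            }
            Thread.sleep(1000);      // wait for block recovery to finish
          }
        }
      }
    }

If the error still appears after the retries, as here, the block's recovery genuinely never completed, which points at the upgrade path rather than test flakiness.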

FAILED:  org.apache.hadoop.hdfs.TestDatanodeRegistration.testForcedRegistration

Error Message:
null

Stack Trace:
java.lang.AssertionError: null
	at org.junit.Assert.fail(Assert.java:86)
	at org.junit.Assert.assertTrue(Assert.java:41)
	at org.junit.Assert.assertTrue(Assert.java:52)
	at org.apache.hadoop.hdfs.TestDatanodeRegistration.testForcedRegistration(TestDatanodeRegistration.java:382)

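The unhelpful "Error Message: null" comes from JUnit itself: the one-argument assertTrue(boolean) at TestDatanodeRegistration.java:382 delegates to fail(null), so the AssertionError carries no text. A small sketch, purely illustrative, of how the two-argument form would surface a readable message instead:

    import static org.junit.Assert.assertTrue;

    public class AssertMessageExample {
      public void checkRegistered(boolean registered) {
        // Bare form: a failure reports only "java.lang.AssertionError: null".
        assertTrue(registered);
        // Messaged form: a failure explains what was expected.
        assertTrue("datanode should be registered after forced "
            + "re-registration", registered);
      }
    }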

FAILED:  org.apache.hadoop.hdfs.TestEncryptionZones.testStartFileRetry

Error Message:
test timed out after 120000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 120000 milliseconds
	at sun.misc.Unsafe.park(Native Method)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:175)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.parkAndCheckInterrupt(AbstractQueuedSynchronizer.java:836)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:997)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1304)
	at java.util.concurrent.CountDownLatch.await(CountDownLatch.java:231)
	at org.apache.hadoop.hdfs.TestEncryptionZones.testStartFileRetry(TestEncryptionZones.java:1094)

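The stack shows the test parked inside CountDownLatch.await(), which blocks indefinitely; the countDown() it was waiting for evidently never arrived, so the 120-second JUnit timeout fired first. A hedged sketch of the fail-fast alternative, a bounded await (the 5-second bound is an arbitrary illustration):

    import java.util.concurrent.CountDownLatch;
    import java.util.concurrent.TimeUnit;

    public class BoundedAwaitExample {
      public static void main(String[] args) throws InterruptedException {
        CountDownLatch done = new CountDownLatch(1);
        new Thread(done::countDown).start();  // the expected signal
        // await(timeout, unit) returns false on timeout instead of
        // parking the thread forever like the no-argument await().
        if (!done.await(5, TimeUnit.SECONDS)) {
          throw new AssertionError("timed out waiting for countDown()");
        }
        System.out.println("latch released");
      }
    }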

FAILED:  org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM.testSecureMode

Error Message:
test timed out after 30000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 30000 milliseconds
	at java.lang.Thread.sleep(Native Method)
	at org.mortbay.thread.QueuedThreadPool.doStop(QueuedThreadPool.java:435)
	at org.mortbay.component.AbstractLifeCycle.stop(AbstractLifeCycle.java:76)
	at org.mortbay.jetty.Server.doStop(Server.java:291)
	at org.mortbay.component.AbstractLifeCycle.stop(AbstractLifeCycle.java:76)
	at org.apache.hadoop.http.HttpServer2.stop(HttpServer2.java:952)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeHttpServer.stop(NameNodeHttpServer.java:199)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.stopHttpServer(NameNode.java:828)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.stopCommonServices(NameNode.java:785)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.stop(NameNode.java:953)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1913)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1882)
	at org.apache.hadoop.hdfs.MiniDFSCluster.shutdown(MiniDFSCluster.java:1875)
	at org.apache.hadoop.hdfs.qjournal.TestSecureNNWithQJM.shutdown(TestSecureNNWithQJM.java:156)

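Here the 30-second timeout expired during teardown, while MiniDFSCluster.shutdown() was still stopping the NameNode's embedded Jetty server (QueuedThreadPool.doStop sleeps while draining worker threads), so the test may simply be under-budgeted rather than broken. One option, assuming JUnit 4.12 is available, is a class-level Timeout rule that gives every method a more generous shared limit; the 180-second figure is illustrative only:

    import org.junit.Rule;
    import org.junit.rules.Timeout;

    public class GenerousTimeoutExample {
      // Applies to each @Test method in the class, replacing tighter
      // per-method @Test(timeout = 30000) annotations.
      @Rule
      public Timeout globalTimeout = Timeout.seconds(180);
    }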

