hbase-builds mailing list archives

From: Apache Jenkins Server <jenk...@builds.apache.org>
Subject: Build failed in Jenkins: HBase-1.2 » latest1.7,Hadoop #124
Date: Fri, 21 Aug 2015 01:07:49 GMT
See <https://builds.apache.org/job/HBase-1.2/jdk=latest1.7,label=Hadoop/124/changes>

Changes:

[busbey] HBASE-14251 ensure javadoc jars have LICENSE/NOTICE that match content.

------------------------------------------
[...truncated 46393 lines...]

Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 276.831 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan2
Running org.apache.hadoop.hbase.mapreduce.TestHRegionPartitioner
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.789 sec - in org.apache.hadoop.hbase.mapreduce.TestHRegionPartitioner
Running org.apache.hadoop.hbase.mapreduce.TestRowCounter
Tests run: 5, Failures: 0, Errors: 2, Skipped: 0, Time elapsed: 125.616 sec <<< FAILURE! - in org.apache.hadoop.hbase.mapreduce.TestCopyTable
testCopyTable(org.apache.hadoop.hbase.mapreduce.TestCopyTable)  Time elapsed: 3.035 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:714)
	at java.util.concurrent.ThreadPoolExecutor.addWorker(ThreadPoolExecutor.java:949)
	at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1360)
	at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:110)
	at org.apache.hadoop.hbase.client.AsyncProcess$AsyncRequestFutureImpl.sendMultiAction(AsyncProcess.java:980)
	at org.apache.hadoop.hbase.client.AsyncProcess$AsyncRequestFutureImpl.access$000(AsyncProcess.java:575)
	at org.apache.hadoop.hbase.client.AsyncProcess.submitMultiActions(AsyncProcess.java:424)
	at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:404)
	at org.apache.hadoop.hbase.client.AsyncProcess.submit(AsyncProcess.java:321)
	at org.apache.hadoop.hbase.client.BufferedMutatorImpl.backgroundFlushCommits(BufferedMutatorImpl.java:206)
	at org.apache.hadoop.hbase.client.BufferedMutatorImpl.flush(BufferedMutatorImpl.java:183)
	at org.apache.hadoop.hbase.client.HTable.flushCommits(HTable.java:1430)
	at org.apache.hadoop.hbase.client.HTable.put(HTable.java:1021)
	at org.apache.hadoop.hbase.mapreduce.TestCopyTable.doCopyTableTest(TestCopyTable.java:88)
	at org.apache.hadoop.hbase.mapreduce.TestCopyTable.testCopyTable(TestCopyTable.java:123)
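This "unable to create new native thread" OutOfMemoryError generally means the JVM could not obtain another OS thread from the build host (a per-user process/thread limit), not that the heap was exhausted. A minimal check one could run on the build slave, shown only as an illustration and not part of this job:

    ulimit -u          # maximum user processes/threads allowed for the build user
    ps -eLf | wc -l    # rough count of threads currently in use on the host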

testCopyTableWithBulkload(org.apache.hadoop.hbase.mapreduce.TestCopyTable)  Time elapsed: 0.487 sec  <<< ERROR!
org.apache.hadoop.hbase.TableExistsException: testCopyTable1
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at org.apache.hadoop.ipc.RemoteException.instantiateException(RemoteException.java:106)
	at org.apache.hadoop.ipc.RemoteException.unwrapRemoteException(RemoteException.java:95)
	at org.apache.hadoop.hbase.util.ForeignExceptionUtil.toIOException(ForeignExceptionUtil.java:45)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.convertResult(HBaseAdmin.java:4234)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.waitProcedureResult(HBaseAdmin.java:4192)
	at org.apache.hadoop.hbase.client.HBaseAdmin$ProcedureFuture.get(HBaseAdmin.java:4125)
	at org.apache.hadoop.hbase.client.HBaseAdmin.createTable(HBaseAdmin.java:574)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTable(HBaseTestingUtility.java:1412)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTable(HBaseTestingUtility.java:1459)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTable(HBaseTestingUtility.java:1350)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTable(HBaseTestingUtility.java:1326)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTable(HBaseTestingUtility.java:1282)
	at org.apache.hadoop.hbase.mapreduce.TestCopyTable.doCopyTableTest(TestCopyTable.java:81)
	at org.apache.hadoop.hbase.mapreduce.TestCopyTable.testCopyTableWithBulkload(TestCopyTable.java:131)
Caused by: org.apache.hadoop.ipc.RemoteException: testCopyTable1
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.prepareCreate(CreateTableProcedure.java:287)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:107)
	at org.apache.hadoop.hbase.master.procedure.CreateTableProcedure.executeFromState(CreateTableProcedure.java:58)
	at org.apache.hadoop.hbase.procedure2.StateMachineProcedure.execute(StateMachineProcedure.java:119)
	at org.apache.hadoop.hbase.procedure2.Procedure.doExecute(Procedure.java:442)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execProcedure(ProcedureExecutor.java:987)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:778)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.execLoop(ProcedureExecutor.java:731)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor.access$400(ProcedureExecutor.java:72)
	at org.apache.hadoop.hbase.procedure2.ProcedureExecutor$2.run(ProcedureExecutor.java:476)

Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 162.014 sec - in org.apache.hadoop.hbase.mapreduce.TestWALPlayer
Running org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFilesSplitRecovery
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 260.444 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan1
Running org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesSplitRecovery
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 23.46 sec - in org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFilesSplitRecovery
Running org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat2
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.707 sec - in org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesSplitRecovery
Running org.apache.hadoop.hbase.mapreduce.TestHLogRecordReader
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.077 sec - in org.apache.hadoop.hbase.mapreduce.TestHLogRecordReader
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 243.195 sec - in org.apache.hadoop.hbase.mapreduce.TestCopyTable
Running org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 312.379 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTsv
Running org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles
Running org.apache.hadoop.hbase.mapreduce.TestCellCounter
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 221.974 sec - in org.apache.hadoop.hbase.mapreduce.TestRowCounter
Running org.apache.hadoop.hbase.mapreduce.TestImportExport
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 178.97 sec - in org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat
Running org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 187.876 sec - in org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles
Tests run: 13, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 298.212 sec - in org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat2
Running org.apache.hadoop.hbase.mapreduce.TestTableInputFormat
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 220.041 sec - in org.apache.hadoop.hbase.mapreduce.TestCellCounter
Running org.apache.hadoop.hbase.mapreduce.TestImportTSVWithVisibilityLabels
Running org.apache.hadoop.hbase.mapreduce.TestImportTSVWithOperationAttributes
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 111.945 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormat
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 96.534 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTSVWithOperationAttributes
Running org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat
Running org.apache.hadoop.hbase.mapreduce.TestMultiTableInputFormat
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 238.351 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTSVWithVisibilityLabels
Running org.apache.hadoop.hbase.mapreduce.TestHashTable
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 361.201 sec - in org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat

Results :


Tests in error: 
org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testLotsOfRegionReplicas[0](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.testLotsOfRegionReplicas:372 »  test timed out after...
  Run 2: TestRegionReplicaFailover.after:125 » TimeoutIO java.util.concurrent.TimeoutEx...

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testLotsOfRegionReplicas[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testPrimaryRegionKill[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testSecondaryRegionKillWhilePrimaryIsAcceptingWrites[0](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testSecondaryRegionKillWhilePrimaryIsAcceptingWrites[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testSecondaryRegionKill[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testSecondaryRegionWithEmptyRegion[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer

org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover.testSecondaryRegionWithNonEmptyRegion[1](org.apache.hadoop.hbase.regionserver.TestRegionReplicaFailover)
  Run 1: TestRegionReplicaFailover.before:116 » IllegalState A mini-cluster is already ...
  Run 2: TestRegionReplicaFailover.after:125 NullPointer


Flaked tests: 
org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent(org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence)
  Run 1: TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent:182->runTestSnapshotDeleteIndependent:425 expected:<17576> but was:<14046>
  Run 2: PASS

org.apache.hadoop.hbase.mapreduce.TestCopyTable.testCopyTable(org.apache.hadoop.hbase.mapreduce.TestCopyTable)
  Run 1: TestCopyTable.testCopyTable:123->doCopyTableTest:88 » OutOfMemory unable to cr...
  Run 2: PASS

org.apache.hadoop.hbase.mapreduce.TestCopyTable.testCopyTableWithBulkload(org.apache.hadoop.hbase.mapreduce.TestCopyTable)
  Run 1: TestCopyTable.testCopyTableWithBulkload:131->doCopyTableTest:81 » TableExists ...
  Run 2: PASS

org.apache.hadoop.hbase.mapreduce.TestWALPlayer.testWALPlayer(org.apache.hadoop.hbase.mapreduce.TestWALPlayer)
  Run 1: TestWALPlayer.testWALPlayer:120 expected:<0> but was:<1>
  Run 2: PASS


Tests run: 1685, Failures: 0, Errors: 8, Skipped: 18, Flakes: 4

[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache HBase ...................................... SUCCESS [2:15.318s]
[INFO] Apache HBase - Checkstyle ......................... SUCCESS [8.680s]
[INFO] Apache HBase - Resource Bundle .................... SUCCESS [0.200s]
[INFO] Apache HBase - Annotations ........................ SUCCESS [0.909s]
[INFO] Apache HBase - Protocol ........................... SUCCESS [21.497s]
[INFO] Apache HBase - Common ............................. SUCCESS [2:59.120s]
[INFO] Apache HBase - Procedure .......................... SUCCESS [1:52.307s]
[INFO] Apache HBase - Client ............................. SUCCESS [1:32.876s]
[INFO] Apache HBase - Hadoop Compatibility ............... SUCCESS [7.890s]
[INFO] Apache HBase - Hadoop Two Compatibility ........... SUCCESS [11.780s]
[INFO] Apache HBase - Prefix Tree ........................ SUCCESS [8.991s]
[INFO] Apache HBase - Server ............................. FAILURE [1:39:16.444s]
[INFO] Apache HBase - Testing Util ....................... SKIPPED
[INFO] Apache HBase - Thrift ............................. SKIPPED
[INFO] Apache HBase - Rest ............................... SKIPPED
[INFO] Apache HBase - Shell .............................. SKIPPED
[INFO] Apache HBase - Integration Tests .................. SKIPPED
[INFO] Apache HBase - Examples ........................... SKIPPED
[INFO] Apache HBase - Assembly ........................... SKIPPED
[INFO] Apache HBase - Shaded ............................. SKIPPED
[INFO] Apache HBase - Shaded - Client .................... SKIPPED
[INFO] Apache HBase - Shaded - Server .................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:49:28.283s
[INFO] Finished at: Fri Aug 21 01:06:24 UTC 2015
[INFO] Final Memory: 393M/621M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.18:test (secondPartTestsExecution) on project hbase-server: ExecutionException: java.lang.RuntimeException: java.lang.RuntimeException: org.apache.maven.surefire.report.ReporterException: When writing xml report stdout/stderr: /tmp/stderr8442503742624445095deferred (No such file or directory) -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hbase-server
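For reference, a resume invocation would look something like the sketch below; the goals are illustrative, since the job's actual Maven goals are not visible in this excerpt:

    # hypothetical resume of this reactor build from the failed module
    mvn clean install -rf :hbase-server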
Build step 'Invoke top-level Maven targets' marked build as failure
Performing Post build task...
Match found for :.* : True
Logical operation result is TRUE
Running script  :   ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
  if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
    #It seems sometimes the tests are not dying immediately. Let's give them 10s
    echo "Suspicious java process found - waiting 10s to see if there are just slow to stop"
    sleep 10   
    ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
    if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
      echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire
but survived"
      echo "************ BEGIN zombies jstack extract"
      ZB_STACK=`jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"`
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack
      echo "************ END  zombies jstack extract"
      JIRA_COMMENT="$JIRA_COMMENT

     {color:red}-1 core zombie tests{color}.  There are ${ZOMBIE_TESTS_COUNT} zombie test(s): ${ZB_STACK}"
      BAD=1
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9
    else
      echo "We're ok: there is no zombie test, but some tests took some time to stop"
    fi
  else
    echo "We're ok: there is no zombie test"
  fi
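The same zombie check can be reproduced by hand; a minimal sketch using the jps/jstack pipeline from the script above (the output file name is illustrative):

    # count leftover surefire fork JVMs, then dump their stacks for inspection
    jps | grep surefirebooter | wc -l
    jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack > zombie-stacks.txt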
[Hadoop] $ /bin/bash -xe /tmp/hudson3430785802683276177.sh
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=15
+ [[ 15 != 0 ]]
+ echo 'Suspicious java process found - waiting 10s to see if they are just slow to stop'
Suspicious java process found - waiting 10s to see if they are just slow to stop
+ sleep 10
++ jps
++ wc -l
++ grep surefirebooter
+ ZOMBIE_TESTS_COUNT=10
+ [[ 10 != 0 ]]
+ echo 'There are 10 zombie tests, they should have been killed by surefire but survived'
There are 10 zombie tests, they should have been killed by surefire but survived
+ echo '************ BEGIN zombies jstack extract'
************ BEGIN zombies jstack extract
++ jps
++ grep surefirebooter
++ cut -d ' ' -f 1
++ xargs -n 1 jstack
++ grep '\.java'
++ grep .test
+ ZB_STACK=
POST BUILD TASK : FAILURE
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-1.2 » latest1.7,Hadoop #112
Archived 1744 artifacts
Archive block size is 32768
Received 15 blocks and 414761410 bytes
Compression is 0.1%
Took 2 min 0 sec
