Posted to builds@hbase.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/06/15 04:48:55 UTC

Build failed in Jenkins: HBase-1.2 » latest1.8,Hadoop #9

See <https://builds.apache.org/job/HBase-1.2/jdk=latest1.8,label=Hadoop/9/changes>

Changes:

[zhangduo] HBASE-13899 Jacoco instrumentation fails under jdk8

------------------------------------------
[...truncated 58562 lines...]
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751208ab0> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"main-BucketCacheWriter-4" daemon prio=10 tid=0x00007fae40ba7000 nid=0x54c6 waiting on condition [0x00007fadc05de000]
   java.lang.Thread.State: WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751208c48> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
	at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getRAMQueueEntries(BucketCache.java:884)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache$WriterThread.run(BucketCache.java:741)
	at java.lang.Thread.run(Thread.java:745)

"main-BucketCacheWriter-3" daemon prio=10 tid=0x00007fae40ba5000 nid=0x54c4 waiting on condition [0x00007fadc06df000]
   java.lang.Thread.State: WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751208d58> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
	at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getRAMQueueEntries(BucketCache.java:884)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache$WriterThread.run(BucketCache.java:741)
	at java.lang.Thread.run(Thread.java:745)

"main-BucketCacheWriter-2" daemon prio=10 tid=0x00007fae40ba3000 nid=0x54c3 waiting on condition [0x00007fadc07e0000]
   java.lang.Thread.State: WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751208e68> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
	at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getRAMQueueEntries(BucketCache.java:884)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache$WriterThread.run(BucketCache.java:741)
	at java.lang.Thread.run(Thread.java:745)

"main-BucketCacheWriter-1" daemon prio=10 tid=0x00007fae40ba1000 nid=0x54c2 waiting on condition [0x00007fadf8b9e000]
   java.lang.Thread.State: WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751208f78> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
	at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getRAMQueueEntries(BucketCache.java:884)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache$WriterThread.run(BucketCache.java:741)
	at java.lang.Thread.run(Thread.java:745)

"main-BucketCacheWriter-0" daemon prio=10 tid=0x00007fae40b98000 nid=0x54bc waiting on condition [0x00007fadf8c9f000]
   java.lang.Thread.State: WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751212f88> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
	at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache.getRAMQueueEntries(BucketCache.java:884)
	at org.apache.hadoop.hbase.io.hfile.bucket.BucketCache$WriterThread.run(BucketCache.java:741)
	at java.lang.Thread.run(Thread.java:745)

"LruBlockCacheStatsExecutor" daemon prio=10 tid=0x00007fae40a2a000 nid=0x5432 waiting on condition [0x00007fadf8da0000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x0000000751230fb0> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"main.LruBlockCache.EvictionThread" daemon prio=10 tid=0x00007fae40a28000 nid=0x5430 in Object.wait() [0x00007fadf8ea1000]
   java.lang.Thread.State: TIMED_WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	at org.apache.hadoop.hbase.io.hfile.LruBlockCache$EvictionThread.run(LruBlockCache.java:803)
	- locked <0x0000000751231148> (a org.apache.hadoop.hbase.io.hfile.LruBlockCache$EvictionThread)
	at java.lang.Thread.run(Thread.java:745)

"LruBlockCacheStatsExecutor" daemon prio=10 tid=0x00007fae40a27000 nid=0x5426 waiting on condition [0x00007fadf8fa2000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007512311c8> (a java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
	at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"main.LruBlockCache.EvictionThread" daemon prio=10 tid=0x00007fae40a22800 nid=0x5425 in Object.wait() [0x00007fadf90a3000]
   java.lang.Thread.State: TIMED_WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	at org.apache.hadoop.hbase.io.hfile.LruBlockCache$EvictionThread.run(LruBlockCache.java:803)
	- locked <0x0000000751231360> (a org.apache.hadoop.hbase.io.hfile.LruBlockCache$EvictionThread)
	at java.lang.Thread.run(Thread.java:745)

"process reaper" daemon prio=10 tid=0x00007fae409df000 nid=0x5402 waiting on condition [0x00007fadf90dc000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007512313e0> (a java.util.concurrent.SynchronousQueue$TransferStack)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
	at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
	at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"Service Thread" daemon prio=10 tid=0x00007fae40293800 nid=0x5041 runnable [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"C2 CompilerThread1" daemon prio=10 tid=0x00007fae40291000 nid=0x5040 waiting on condition [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"C2 CompilerThread0" daemon prio=10 tid=0x00007fae4028e000 nid=0x503f waiting on condition [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"Signal Dispatcher" daemon prio=10 tid=0x00007fae4028c000 nid=0x503e runnable [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"Finalizer" daemon prio=10 tid=0x00007fae4026b800 nid=0x502b in Object.wait() [0x00007fae387bc000]
   java.lang.Thread.State: WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
	- locked <0x000000075107a090> (a java.lang.ref.ReferenceQueue$Lock)
	at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
	at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:209)

"Reference Handler" daemon prio=10 tid=0x00007fae40269000 nid=0x502a in Object.wait() [0x00007fae388bd000]
   java.lang.Thread.State: WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	at java.lang.Object.wait(Object.java:503)
	at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:133)
	- locked <0x0000000751079ba8> (a java.lang.ref.Reference$Lock)

"main" prio=10 tid=0x00007fae4000a800 nid=0x500d runnable [0x00007fae47273000]
   java.lang.Thread.State: RUNNABLE
	at java.lang.Thread.dumpThreads(Native Method)
	at java.lang.Thread.getAllStackTraces(Thread.java:1640)
	at org.apache.hadoop.hbase.ResourceCheckerJUnitListener$ThreadResourceAnalyzer.getVal(ResourceCheckerJUnitListener.java:53)
	at org.apache.hadoop.hbase.ResourceChecker.fill(ResourceChecker.java:114)
	at org.apache.hadoop.hbase.ResourceChecker.fillInit(ResourceChecker.java:103)
	at org.apache.hadoop.hbase.ResourceChecker.start(ResourceChecker.java:186)
	at org.apache.hadoop.hbase.ResourceCheckerJUnitListener.start(ResourceCheckerJUnitListener.java:156)
	at org.apache.hadoop.hbase.ResourceCheckerJUnitListener.testStarted(ResourceCheckerJUnitListener.java:179)
	at org.junit.runner.notification.RunNotifier$3.notifyListener(RunNotifier.java:115)
	at org.junit.runner.notification.RunNotifier$SafeNotifier.run(RunNotifier.java:61)
	- locked <0x00000007513c9368> (a java.util.Collections$SynchronizedRandomAccessList)
	at org.junit.runner.notification.RunNotifier.fireTestStarted(RunNotifier.java:112)
	at org.junit.internal.runners.model.EachTestNotifier.fireTestStarted(EachTestNotifier.java:43)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:269)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runners.Suite.runChild(Suite.java:127)
	at org.junit.runners.Suite.runChild(Suite.java:26)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runners.Suite.runChild(Suite.java:127)
	at org.junit.runners.Suite.runChild(Suite.java:26)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:160)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:138)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.createRequestAndRun(JUnitCoreWrapper.java:107)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.executeEager(JUnitCoreWrapper.java:77)
	at org.apache.maven.surefire.junitcore.JUnitCoreWrapper.execute(JUnitCoreWrapper.java:53)
	at org.apache.maven.surefire.junitcore.JUnitCoreProvider.invoke(JUnitCoreProvider.java:144)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:203)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:155)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

"VM Thread" prio=10 tid=0x00007fae40265000 nid=0x5023 runnable 

"GC task thread#0 (ParallelGC)" prio=10 tid=0x00007fae40020800 nid=0x500f runnable 

"GC task thread#1 (ParallelGC)" prio=10 tid=0x00007fae40022000 nid=0x5010 runnable 

"GC task thread#2 (ParallelGC)" prio=10 tid=0x00007fae40024000 nid=0x5011 runnable 

"GC task thread#3 (ParallelGC)" prio=10 tid=0x00007fae40026000 nid=0x5012 runnable 

"GC task thread#4 (ParallelGC)" prio=10 tid=0x00007fae40028000 nid=0x5013 runnable 

"GC task thread#5 (ParallelGC)" prio=10 tid=0x00007fae40029800 nid=0x5015 runnable 

"GC task thread#6 (ParallelGC)" prio=10 tid=0x00007fae4002b800 nid=0x5016 runnable 

"GC task thread#7 (ParallelGC)" prio=10 tid=0x00007fae4002d800 nid=0x5017 runnable 

"GC task thread#8 (ParallelGC)" prio=10 tid=0x00007fae4002f000 nid=0x5018 runnable 

"GC task thread#9 (ParallelGC)" prio=10 tid=0x00007fae40031000 nid=0x5019 runnable 

"GC task thread#10 (ParallelGC)" prio=10 tid=0x00007fae40033000 nid=0x501a runnable 

"GC task thread#11 (ParallelGC)" prio=10 tid=0x00007fae40035000 nid=0x501b runnable 

"GC task thread#12 (ParallelGC)" prio=10 tid=0x00007fae40036800 nid=0x501c runnable 

"VM Periodic Task Thread" prio=10 tid=0x00007fae4029e000 nid=0x5042 waiting on condition 

JNI global references: 312

+ echo '************ END  zombies jstack extract'
************ END  zombies jstack extract
+ JIRA_COMMENT='

     {color:red}-1 core zombie tests{color}.  There are 1 zombie test(s): 	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWriteInternals(TestCacheOnWrite.java:269)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWrite(TestCacheOnWrite.java:487)'
+ BAD=1
+ jps
+ grep surefirebooter
+ xargs kill -9
+ cut -d ' ' -f 1
POST BUILD TASK : SUCCESS
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-1.2 » latest1.8,Hadoop #8
Archived 2095 artifacts
Archive block size is 32768
Received 45 blocks and 501168377 bytes
Compression is 0.3%
Took 4 min 30 sec

Jenkins build is back to normal : HBase-1.2 » latest1.8,Hadoop #11

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/HBase-1.2/jdk=latest1.8,label=Hadoop/11/changes>


Build failed in Jenkins: HBase-1.2 » latest1.8,Hadoop #10

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/HBase-1.2/jdk=latest1.8,label=Hadoop/10/changes>

Changes:

[matteo.bertozzi] HBASE-13894 Avoid visitor alloc each call of ByteBufferArray get/putMultiple()

------------------------------------------
[...truncated 45502 lines...]
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter
Running org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Running org.apache.hadoop.hbase.filter.TestScanRowPrefix
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.498 sec - in org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.098 sec - in org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.242 sec - in org.apache.hadoop.hbase.filter.TestScanRowPrefix
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.644 sec - in org.apache.hadoop.hbase.fs.TestBlockReorder
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.612 sec - in org.apache.hadoop.hbase.replication.TestReplicationSyncUpTool
Running org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.filter.TestFilterWrapper
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.filter.TestMultiRowRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.813 sec - in org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.531 sec - in org.apache.hadoop.hbase.filter.TestFilterWrapper
Running org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint
Tests run: 19, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.099 sec - in org.apache.hadoop.hbase.filter.TestMultiRowRangeFilter
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestHLogRecordReader
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.143 sec - in org.apache.hadoop.hbase.mapreduce.TestHLogRecordReader
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestWALPlayer
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.953 sec - in org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestMultiTableInputFormat
Running org.apache.hadoop.hbase.mapreduce.TestHashTable
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 58.297 sec - in org.apache.hadoop.hbase.mapreduce.TestWALPlayer
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.267 sec - in org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesUseSecurityEndPoint
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestImportTSVWithOperationAttributes
Running org.apache.hadoop.hbase.mapreduce.TestWALRecordReader
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.792 sec - in org.apache.hadoop.hbase.mapreduce.TestWALRecordReader
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 77.044 sec - in org.apache.hadoop.hbase.mapreduce.TestHashTable
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestSyncTable
Running org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFilesSplitRecovery
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 34.099 sec - in org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFilesSplitRecovery
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestTimeRangeMapRed
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 113.987 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTSVWithOperationAttributes
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 176.595 sec - in org.apache.hadoop.hbase.mapreduce.TestMultiTableInputFormat
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 113.182 sec - in org.apache.hadoop.hbase.mapreduce.TestSyncTable
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.225 sec - in org.apache.hadoop.hbase.mapreduce.TestTimeRangeMapRed
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestImportTSVWithTTLs
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestCellCounter
Running org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan1
Tests run: 13, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 313.958 sec - in org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 61.241 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTSVWithTTLs
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles
Running org.apache.hadoop.hbase.mapreduce.TestHRegionPartitioner
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.673 sec - in org.apache.hadoop.hbase.mapreduce.TestHRegionPartitioner
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 105.937 sec - in org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFiles
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestTableInputFormat
Tests run: 16, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 114.043 sec - in org.apache.hadoop.hbase.mapreduce.TestSecureLoadIncrementalHFiles
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 214.682 sec - in org.apache.hadoop.hbase.mapreduce.TestCellCounter
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan2
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 230.633 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan1
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestImportExport
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 320.879 sec - in org.apache.hadoop.hbase.mapreduce.TestTableSnapshotInputFormat
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestRowCounter
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.775 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormat
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesSplitRecovery
Running org.apache.hadoop.hbase.mapreduce.TestCopyTable
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 39.222 sec - in org.apache.hadoop.hbase.mapreduce.TestLoadIncrementalHFilesSplitRecovery
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat2
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 153.112 sec - in org.apache.hadoop.hbase.mapreduce.TestCopyTable
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 243.955 sec - in org.apache.hadoop.hbase.mapreduce.TestTableInputFormatScan2
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 209.441 sec - in org.apache.hadoop.hbase.mapreduce.TestRowCounter
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestMultithreadedTableMapper
Running org.apache.hadoop.hbase.mapreduce.TestImportTSVWithVisibilityLabels
Running org.apache.hadoop.hbase.mapreduce.TestImportTsv
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 107.731 sec - in org.apache.hadoop.hbase.mapreduce.TestMultithreadedTableMapper
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestTableMapReduce
Tests run: 13, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 304.672 sec - in org.apache.hadoop.hbase.mapreduce.TestHFileOutputFormat2
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 431.752 sec - in org.apache.hadoop.hbase.mapreduce.TestImportExport
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=256m; support was removed in 8.0
Running org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 226.34 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTSVWithVisibilityLabels
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 173.014 sec - in org.apache.hadoop.hbase.mapreduce.TestTableMapReduce
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 308.215 sec - in org.apache.hadoop.hbase.mapreduce.TestImportTsv
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 164.672 sec - in org.apache.hadoop.hbase.mapreduce.TestMultiTableSnapshotInputFormat

Results :



Flaked tests: 
org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent(org.apache.hadoop.hbase.client.TestSnapshotCloneIndependence)
  Run 1: TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent:182->runTestSnapshotDeleteIndependent:425 expected:<17576> but was:<14046>
  Run 2: TestSnapshotCloneIndependence.testOnlineSnapshotDeleteIndependent:182->runTestSnapshotDeleteIndependent:425 expected:<17576> but was:<14046>
  Run 3: PASS

org.apache.hadoop.hbase.regionserver.TestSplitWalDataLoss.test(org.apache.hadoop.hbase.regionserver.TestSplitWalDataLoss)
  Run 1: TestSplitWalDataLoss.test:121 » RetriesExhaustedWithDetails Failed 1 action: T...
  Run 2: PASS


Tests run: 2644, Failures: 0, Errors: 0, Skipped: 20, Flakes: 2

[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] HBase ............................................. SUCCESS [52.387s]
[INFO] HBase - Checkstyle ................................ SUCCESS [5.580s]
[INFO] HBase - Annotations ............................... SUCCESS [1.315s]
[INFO] HBase - Protocol .................................. SUCCESS [15.440s]
[INFO] HBase - Common .................................... SUCCESS [1:49.666s]
[INFO] HBase - Procedure ................................. SUCCESS [1:48.542s]
[INFO] HBase - Client .................................... SUCCESS [1:25.465s]
[INFO] HBase - Hadoop Compatibility ...................... SUCCESS [7.825s]
[INFO] HBase - Hadoop Two Compatibility .................. SUCCESS [9.738s]
[INFO] HBase - Prefix Tree ............................... SUCCESS [7.196s]
[INFO] HBase - Server .................................... FAILURE [1:30:11.647s]
[INFO] HBase - Testing Util .............................. SKIPPED
[INFO] HBase - Thrift .................................... SKIPPED
[INFO] HBase - Rest ...................................... SKIPPED
[INFO] HBase - Shell ..................................... SKIPPED
[INFO] HBase - Integration Tests ......................... SKIPPED
[INFO] HBase - Examples .................................. SKIPPED
[INFO] HBase - Assembly .................................. SKIPPED
[INFO] HBase - Shaded .................................... SKIPPED
[INFO] HBase - Shaded - Client ........................... SKIPPED
[INFO] HBase - Shaded - Server ........................... SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 1:37:12.818s
[INFO] Finished at: Mon Jun 15 18:44:25 UTC 2015
[INFO] Final Memory: 100M/754M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.18:test (secondPartTestsExecution) on project hbase-server: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hbase-server
Build step 'Invoke top-level Maven targets' marked build as failure
Performing Post build task...
Match found for :.* : True
Logical operation result is TRUE
Running script  :   ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
  if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
    #It seems sometimes the tests are not dying immediately. Let's give them 10s
    echo "Suspicious java process found - waiting 10s to see if there are just slow to stop"
    sleep 10   
    ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
    if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
      echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire but survived"
      echo "************ BEGIN zombies jstack extract"
      ZB_STACK=`jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"`
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack
      echo "************ END  zombies jstack extract"
      JIRA_COMMENT="$JIRA_COMMENT

     {color:red}-1 core zombie tests{color}.  There are ${ZOMBIE_TESTS_COUNT} zombie test(s): ${ZB_STACK}"
      BAD=1
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9
    else
      echo "We're ok: there is no zombie test, but some tests took some time to stop"
    fi
  else
    echo "We're ok: there is no zombie test"
  fi
[Hadoop] $ /bin/bash -xe /tmp/hudson6810092719916393652.sh
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=1
+ [[ 1 != 0 ]]
+ echo 'Suspicious java process found - waiting 10s to see if they are just slow to stop'
Suspicious java process found - waiting 10s to see if they are just slow to stop
+ sleep 10
++ grep surefirebooter
++ wc -l
++ jps
+ ZOMBIE_TESTS_COUNT=0
+ [[ 0 != 0 ]]
+ echo 'We'\''re ok: there is no zombie test, but some tests took some time to stop'
We're ok: there is no zombie test, but some tests took some time to stop
POST BUILD TASK : SUCCESS
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-1.2 » latest1.8,Hadoop #8
Archived 2099 artifacts
Archive block size is 32768
Received 39 blocks and 1207333237 bytes
Compression is 0.1%
Took 5 min 13 sec
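
For reference, a minimal standalone version of the zombie check run in the post-build task above, assuming the JDK's jps and jstack are on the PATH and that leftover Surefire forks show up as "surefirebooter" processes (as they do in this log), could be run by hand roughly like this:

  #!/usr/bin/env bash
  # Sketch of the post-build zombie check, for local use. Assumes jps/jstack from
  # the JDK are available and that stuck test JVMs appear as surefirebooter forks.
  ZOMBIE_TESTS_COUNT=$(jps | grep surefirebooter | wc -l)
  if [[ $ZOMBIE_TESTS_COUNT != 0 ]]; then
    echo "Suspicious java process found - waiting 10s to see if they are just slow to stop"
    sleep 10
    ZOMBIE_TESTS_COUNT=$(jps | grep surefirebooter | wc -l)
    if [[ $ZOMBIE_TESTS_COUNT != 0 ]]; then
      echo "There are $ZOMBIE_TESTS_COUNT zombie test(s); dumping stacks and killing them"
      # One jstack per leftover fork, then kill the forks.
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -r -n 1 jstack
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -r kill -9
    else
      echo "We're ok: there is no zombie test, but some tests took some time to stop"
    fi
  else
    echo "We're ok: there is no zombie test"
  fi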