Posted to builds@hbase.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/05/06 12:57:37 UTC

Build failed in Jenkins: HBase-0.98 #979

See <https://builds.apache.org/job/HBase-0.98/979/changes>

Changes:

[zhangduo] HBASE-13628 Use AtomicLong as size in BoundedConcurrentLinkedQueue
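
The one-line summary above describes a data-structure change: the size counter of BoundedConcurrentLinkedQueue is kept in an AtomicLong. As a rough, hypothetical sketch of the idea only (not the actual HBase class; the name and the bound handling below are illustrative assumptions), an AtomicLong lets the bound be enforced with a compare-and-set instead of the O(n) ConcurrentLinkedQueue.size():

import java.util.concurrent.ConcurrentLinkedQueue;
import java.util.concurrent.atomic.AtomicLong;

/**
 * Illustrative sketch only, not the HBase implementation: a bounded queue
 * that tracks its size in an AtomicLong so the bound can be checked and
 * reserved atomically without scanning the underlying queue.
 */
public class BoundedQueueSketch<T> {
  private final ConcurrentLinkedQueue<T> queue = new ConcurrentLinkedQueue<T>();
  private final AtomicLong size = new AtomicLong(0);
  private final long maxSize;

  public BoundedQueueSketch(long maxSize) {
    this.maxSize = maxSize;
  }

  /** Reserve a slot with a CAS on the counter, then enqueue. */
  public boolean offer(T e) {
    while (true) {
      long current = size.get();
      if (current >= maxSize) {
        return false; // full
      }
      if (size.compareAndSet(current, current + 1)) {
        queue.add(e);
        return true;
      }
      // another thread won the CAS; retry
    }
  }

  /** Dequeue and release the reserved slot. */
  public T poll() {
    T e = queue.poll();
    if (e != null) {
      size.decrementAndGet();
    }
    return e;
  }

  public long size() {
    return size.get();
  }
}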

------------------------------------------
[...truncated 6867 lines...]
	- locked <0x00000007c54df0c8> (a sun.nio.ch.EPollSelectorImpl)
	at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
	at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:622)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:310)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
	at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
	at java.lang.Thread.run(Thread.java:745)

"nioEventLoopGroup-4-2" prio=10 tid=0x00007fa4a56d8000 nid=0x4155 runnable [0x00007fa486482000]
   java.lang.Thread.State: RUNNABLE
	at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
	at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
	at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
	at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
	- locked <0x00000007c549af08> (a io.netty.channel.nio.SelectedSelectionKeySet)
	- locked <0x00000007c549af28> (a java.util.Collections$UnmodifiableSet)
	- locked <0x00000007c549aec0> (a sun.nio.ch.EPollSelectorImpl)
	at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
	at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:622)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:310)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
	at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
	at java.lang.Thread.run(Thread.java:745)

"org.apache.hadoop.hdfs.PeerCache@7c61a600" daemon prio=10 tid=0x00007fa4a5ed3000 nid=0x4152 waiting on condition [0x00007fa484563000]
   java.lang.Thread.State: TIMED_WAITING (sleeping)
	at java.lang.Thread.sleep(Native Method)
	at org.apache.hadoop.hdfs.PeerCache.run(PeerCache.java:255)
	at org.apache.hadoop.hdfs.PeerCache.access$000(PeerCache.java:46)
	at org.apache.hadoop.hdfs.PeerCache$1.run(PeerCache.java:124)
	at java.lang.Thread.run(Thread.java:745)

"LeaseRenewer:jenkins@localhost:34626" daemon prio=10 tid=0x00007fa4a5ea8000 nid=0x4146 waiting on condition [0x00007fa484765000]
   java.lang.Thread.State: TIMED_WAITING (sleeping)
	at java.lang.Thread.sleep(Native Method)
	at org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:441)
	at org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$700(LeaseRenewer.java:75)
	at org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:303)
	at java.lang.Thread.run(Thread.java:745)

"process reaper" daemon prio=10 tid=0x00007fa4a5e95800 nid=0x413a waiting on condition [0x00007fa48415f000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007c5501238> (a java.util.concurrent.SynchronousQueue$TransferStack)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
	at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
	at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"process reaper" daemon prio=10 tid=0x00007fa49c02f800 nid=0x4138 waiting on condition [0x00007fa498056000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007c5501238> (a java.util.concurrent.SynchronousQueue$TransferStack)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
	at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
	at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"process reaper" daemon prio=10 tid=0x0000000002095800 nid=0x4136 waiting on condition [0x00007fa49808f000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007c5501238> (a java.util.concurrent.SynchronousQueue$TransferStack)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
	at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
	at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"client DomainSocketWatcher" daemon prio=10 tid=0x00007fa4a5e75800 nid=0x4128 runnable [0x00007fa484d6b000]
   java.lang.Thread.State: RUNNABLE
	at org.apache.hadoop.net.unix.DomainSocketWatcher.doPoll0(Native Method)
	at org.apache.hadoop.net.unix.DomainSocketWatcher.access$900(DomainSocketWatcher.java:52)
	at org.apache.hadoop.net.unix.DomainSocketWatcher$2.run(DomainSocketWatcher.java:511)
	at java.lang.Thread.run(Thread.java:745)

"nioEventLoopGroup-4-1" prio=10 tid=0x00007fa4a5d3d800 nid=0x4114 runnable [0x00007fa48617f000]
   java.lang.Thread.State: RUNNABLE
	at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
	at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
	at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
	at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
	- locked <0x00000007c54e1e78> (a io.netty.channel.nio.SelectedSelectionKeySet)
	- locked <0x00000007c5518210> (a java.util.Collections$UnmodifiableSet)
	- locked <0x00000007c54e1d70> (a sun.nio.ch.EPollSelectorImpl)
	at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
	at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:622)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:310)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
	at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
	at java.lang.Thread.run(Thread.java:745)

"nioEventLoopGroup-2-1" prio=10 tid=0x00007fa4a5634800 nid=0x40f9 runnable [0x00007fa487896000]
   java.lang.Thread.State: RUNNABLE
	at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
	at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
	at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
	at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
	- locked <0x00000007c50d8390> (a io.netty.channel.nio.SelectedSelectionKeySet)
	- locked <0x00000007c50da470> (a java.util.Collections$UnmodifiableSet)
	- locked <0x00000007c50d8288> (a sun.nio.ch.EPollSelectorImpl)
	at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
	at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:622)
	at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:310)
	at io.netty.util.concurrent.SingleThreadEventExecutor$2.run(SingleThreadEventExecutor.java:116)
	at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
	at java.lang.Thread.run(Thread.java:745)

"process reaper" daemon prio=10 tid=0x00007fa4a4975000 nid=0x40c8 waiting on condition [0x00007fa4980d6000]
   java.lang.Thread.State: TIMED_WAITING (parking)
	at sun.misc.Unsafe.park(Native Method)
	- parking to wait for  <0x00000007c5501238> (a java.util.concurrent.SynchronousQueue$TransferStack)
	at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
	at java.util.concurrent.SynchronousQueue$TransferStack.awaitFulfill(SynchronousQueue.java:460)
	at java.util.concurrent.SynchronousQueue$TransferStack.transfer(SynchronousQueue.java:359)
	at java.util.concurrent.SynchronousQueue.poll(SynchronousQueue.java:942)
	at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
	at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
	at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
	at java.lang.Thread.run(Thread.java:745)

"Service Thread" daemon prio=10 tid=0x00007fa4a4291000 nid=0x40c5 runnable [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"C2 CompilerThread1" daemon prio=10 tid=0x00007fa4a428e800 nid=0x40c4 waiting on condition [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"C2 CompilerThread0" daemon prio=10 tid=0x00007fa4a428b800 nid=0x40c3 waiting on condition [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"Signal Dispatcher" daemon prio=10 tid=0x00007fa4a4281800 nid=0x40c2 runnable [0x0000000000000000]
   java.lang.Thread.State: RUNNABLE

"Finalizer" daemon prio=10 tid=0x00007fa4a426b000 nid=0x40c1 in Object.wait() [0x00007fa499214000]
   java.lang.Thread.State: WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	- waiting on <0x00000007c449b898> (a java.lang.ref.ReferenceQueue$Lock)
	at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
	- locked <0x00000007c449b898> (a java.lang.ref.ReferenceQueue$Lock)
	at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
	at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:189)

"Reference Handler" daemon prio=10 tid=0x00007fa4a4267000 nid=0x40c0 in Object.wait() [0x00007fa499315000]
   java.lang.Thread.State: WAITING (on object monitor)
	at java.lang.Object.wait(Native Method)
	- waiting on <0x00000007c449b328> (a java.lang.ref.Reference$Lock)
	at java.lang.Object.wait(Object.java:503)
	at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:133)
	- locked <0x00000007c449b328> (a java.lang.ref.Reference$Lock)

"main" prio=10 tid=0x00007fa4a400a000 nid=0x40b1 waiting on condition [0x00007fa4ac73a000]
   java.lang.Thread.State: TIMED_WAITING (sleeping)
	at java.lang.Thread.sleep(Native Method)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2267)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2283)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1491)
	- locked <0x00000007c52040e0> (a org.apache.hadoop.hdfs.MiniDFSCluster)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:835)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:471)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:430)
	at org.apache.hadoop.hdfs.TestFSOutputSummer.doTestFSOutputSummer(TestFSOutputSummer.java:131)
	at org.apache.hadoop.hdfs.TestFSOutputSummer.testFSOutputSummer(TestFSOutputSummer.java:122)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
	at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
	at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
	at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
	at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
	at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
	at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
	at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
	at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
	at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
	at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
	at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
	at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
	at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)

"VM Thread" prio=10 tid=0x00007fa4a4264800 nid=0x40bf runnable 

"GC task thread#0 (ParallelGC)" prio=10 tid=0x00007fa4a4020000 nid=0x40b2 runnable 

"GC task thread#1 (ParallelGC)" prio=10 tid=0x00007fa4a4022000 nid=0x40b3 runnable 

"GC task thread#2 (ParallelGC)" prio=10 tid=0x00007fa4a4023800 nid=0x40b4 runnable 

"GC task thread#3 (ParallelGC)" prio=10 tid=0x00007fa4a4025800 nid=0x40b5 runnable 

"GC task thread#4 (ParallelGC)" prio=10 tid=0x00007fa4a4027800 nid=0x40b6 runnable 

"GC task thread#5 (ParallelGC)" prio=10 tid=0x00007fa4a4029800 nid=0x40b7 runnable 

"GC task thread#6 (ParallelGC)" prio=10 tid=0x00007fa4a402b000 nid=0x40b8 runnable 

"GC task thread#7 (ParallelGC)" prio=10 tid=0x00007fa4a402d000 nid=0x40b9 runnable 

"GC task thread#8 (ParallelGC)" prio=10 tid=0x00007fa4a402f000 nid=0x40ba runnable 

"GC task thread#9 (ParallelGC)" prio=10 tid=0x00007fa4a4030800 nid=0x40bb runnable 

"GC task thread#10 (ParallelGC)" prio=10 tid=0x00007fa4a4032800 nid=0x40bc runnable 

"GC task thread#11 (ParallelGC)" prio=10 tid=0x00007fa4a4034800 nid=0x40bd runnable 

"GC task thread#12 (ParallelGC)" prio=10 tid=0x00007fa4a4036800 nid=0x40be runnable 

"VM Periodic Task Thread" prio=10 tid=0x00007fa4a429b800 nid=0x40c6 waiting on condition 

JNI global references: 230

+ echo '************ END  zombies jstack extract'
************ END  zombies jstack extract
+ JIRA_COMMENT='

     {color:red}-1 core zombie tests{color}.  There are 1 zombie test(s): 	at org.apache.hadoop.hdfs.TestFSOutputSummer.testFSOutputSummer(TestFSOutputSummer.java:121)'
+ BAD=1
+ jps
+ grep surefirebooter
+ xargs kill -9
+ cut -d ' ' -f 1
POST BUILD TASK : SUCCESS
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-0.98 #977
Archived 1736 artifacts
Archive block size is 32768
Received 19 blocks and 286735855 bytes
Compression is 0.2%
Took 2 min 48 sec
Recording test results
Updating HBASE-13628

Jenkins build is back to normal : HBase-0.98 #981

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/HBase-0.98/981/changes>


Build failed in Jenkins: HBase-0.98 #980

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/HBase-0.98/980/changes>

Changes:

[ramkrishna] HBASE-13632 -  Backport HBASE-13368 to branch-1 and 0.98 (Ram)

------------------------------------------
[...truncated 2144 lines...]
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoFileContent(HRegionFileSystem.java:756)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:840)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:803)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.createRegionOnFileSystem(HRegionFileSystem.java:869)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4507)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4477)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4450)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4528)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4408)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTestRegion(HBaseTestingUtility.java:3439)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompactionInternals(TestCacheOnWrite.java:429)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction(TestCacheOnWrite.java:485)

testStoreFileCacheOnWrite[106](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.092 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.fs.FilterFileSystem.create(FilterFileSystem.java:174)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter.createOutputStream(AbstractHFileWriter.java:266)
	at org.apache.hadoop.hbase.io.hfile.HFile$WriterFactory.create(HFile.java:302)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:755)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:706)
	at org.apache.hadoop.hbase.regionserver.StoreFile$WriterBuilder.build(StoreFile.java:644)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.writeStoreFile(TestCacheOnWrite.java:384)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWriteInternals(TestCacheOnWrite.java:262)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWrite(TestCacheOnWrite.java:479)

testNotCachingDataBlocksDuringCompaction[107](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.087 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoFileContent(HRegionFileSystem.java:756)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:840)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:803)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.createRegionOnFileSystem(HRegionFileSystem.java:869)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4507)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4477)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4450)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4528)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4408)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTestRegion(HBaseTestingUtility.java:3439)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompactionInternals(TestCacheOnWrite.java:429)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction(TestCacheOnWrite.java:485)

testStoreFileCacheOnWrite[107](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.09 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.fs.FilterFileSystem.create(FilterFileSystem.java:174)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter.createOutputStream(AbstractHFileWriter.java:266)
	at org.apache.hadoop.hbase.io.hfile.HFile$WriterFactory.create(HFile.java:302)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:755)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:706)
	at org.apache.hadoop.hbase.regionserver.StoreFile$WriterBuilder.build(StoreFile.java:644)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.writeStoreFile(TestCacheOnWrite.java:384)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWriteInternals(TestCacheOnWrite.java:262)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWrite(TestCacheOnWrite.java:479)

Running org.apache.hadoop.hbase.io.encoding.TestLoadAndSwitchEncodeOnDisk
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.909 sec - in org.apache.hadoop.hbase.io.encoding.TestLoadAndSwitchEncodeOnDisk
Running org.apache.hadoop.hbase.io.encoding.TestChangingEncoding
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.402 sec - in org.apache.hadoop.hbase.io.encoding.TestChangingEncoding
Running org.apache.hadoop.hbase.io.encoding.TestEncodedSeekers
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 88.985 sec - in org.apache.hadoop.hbase.io.encoding.TestEncodedSeekers
Running org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders
Tests run: 28, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 195.96 sec - in org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders
Running org.apache.hadoop.hbase.io.encoding.TestBufferedDataBlockEncoder
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 21.683 sec - in org.apache.hadoop.hbase.io.encoding.TestBufferedDataBlockEncoder
Running org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.703 sec - in org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Running org.apache.hadoop.hbase.filter.TestFilterWrapper
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.673 sec - in org.apache.hadoop.hbase.filter.TestFilterWrapper
Running org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.694 sec - in org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Running org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.716 sec - in org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter

Results :


Tests in error: 
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:486->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory


Tests run: 2254, Failures: 0, Errors: 16, Skipped: 22
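
Each of the 16 errors above is a java.lang.OutOfMemoryError: unable to create new native thread, which usually indicates native-thread exhaustion on the build slave (a per-user process/thread limit, or too many concurrently forked JVMs) rather than Java heap pressure. A hypothetical, self-contained helper such as the following (not part of the HBase build; the class name is made up) could be invoked when the error is caught to confirm that thread count, not heap, is the limiting factor:

import java.lang.management.ManagementFactory;
import java.lang.management.ThreadMXBean;

/**
 * Hypothetical diagnostic: dump JVM thread counts so a build log shows
 * whether thread creation failed because of native-thread exhaustion.
 */
public final class ThreadExhaustionReport {
  public static void log() {
    ThreadMXBean threads = ManagementFactory.getThreadMXBean();
    System.err.println("Live threads:    " + threads.getThreadCount());
    System.err.println("Peak threads:    " + threads.getPeakThreadCount());
    System.err.println("Total started:   " + threads.getTotalStartedThreadCount());
    System.err.println("Daemon threads:  " + threads.getDaemonThreadCount());
  }

  public static void main(String[] args) {
    log();
  }
}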

[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] HBase ............................................. SUCCESS [3.068s]
[INFO] HBase - Checkstyle ................................ SUCCESS [0.594s]
[INFO] HBase - Annotations ............................... SUCCESS [0.838s]
[INFO] HBase - Common .................................... SUCCESS [45.980s]
[INFO] HBase - Protocol .................................. SUCCESS [9.030s]
[INFO] HBase - Client .................................... SUCCESS [51.847s]
[INFO] HBase - Hadoop Compatibility ...................... SUCCESS [6.956s]
[INFO] HBase - Hadoop Two Compatibility .................. SUCCESS [6.196s]
[INFO] HBase - Prefix Tree ............................... SUCCESS [8.066s]
[INFO] HBase - Server .................................... FAILURE [3:34:24.899s]
[INFO] HBase - Testing Util .............................. SKIPPED
[INFO] HBase - Thrift .................................... SKIPPED
[INFO] HBase - Rest ...................................... SKIPPED
[INFO] HBase - Shell ..................................... SKIPPED
[INFO] HBase - Integration Tests ......................... SKIPPED
[INFO] HBase - Examples .................................. SKIPPED
[INFO] HBase - Assembly .................................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 3:36:38.372s
[INFO] Finished at: Wed May 06 15:41:24 UTC 2015
[INFO] Final Memory: 52M/615M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.18:test (secondPartTestsExecution) on project hbase-server: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hbase-server
Build step 'Invoke top-level Maven targets' marked build as failure
Performing Post build task...
Match found for :.* : True
Logical operation result is TRUE
Running script  :   ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
  if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
    #It seems sometimes the tests are not dying immediately. Let's give them 10s
    echo "Suspicious java process found - waiting 10s to see if there are just slow to stop"
    sleep 10   
    ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
    if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
      echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire but survived"
      echo "************ BEGIN zombies jstack extract"
      ZB_STACK=`jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"`
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack
      echo "************ END  zombies jstack extract"
      JIRA_COMMENT="$JIRA_COMMENT

     {color:red}-1 core zombie tests{color}.  There are ${ZOMBIE_TESTS_COUNT} zombie test(s): ${ZB_STACK}"
      BAD=1
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9
    else
      echo "We're ok: there is no zombie test, but some tests took some time to stop"
    fi
  else
    echo "We're ok: there is no zombie test"
  fi
[HBase-0.98] $ /bin/bash -xe /tmp/hudson7296907306642758762.sh
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=1
+ [[ 1 != 0 ]]
+ echo 'Suspicious java process found - waiting 10s to see if they are just slow to stop'
Suspicious java process found - waiting 10s to see if they are just slow to stop
+ sleep 10
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=1
+ [[ 1 != 0 ]]
+ echo 'There are 1 zombie tests, they should have been killed by surefire but survived'
There are 1 zombie tests, they should have been killed by surefire but survived
+ echo '************ BEGIN zombies jstack extract'
************ BEGIN zombies jstack extract
++ jps
++ grep surefirebooter
++ cut -d ' ' -f 1
++ grep .test
++ xargs -n 1 jstack
++ grep '\.java'
30552: Unable to open socket file: target process not responding or HotSpot VM not loaded
The -F option can be used when the target process is not responding
+ ZB_STACK=
POST BUILD TASK : FAILURE
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-0.98 #977
Archived 1736 artifacts
Archive block size is 32768
Received 19 blocks and 290872558 bytes
Compression is 0.2%
Took 2 min 36 sec
Recording test results
Updating HBASE-13632
Updating HBASE-13368