Posted to builds@hbase.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/06/30 07:23:59 UTC

Build failed in Jenkins: HBase-0.98 #1043

See <https://builds.apache.org/job/HBase-0.98/1043/changes>

Changes:

[larsh] HBASE-13959 Region splitting uses a single thread in most common cases. (Hari Krishna Dara)

------------------------------------------
[...truncated 2117 lines...]
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoFileContent(HRegionFileSystem.java:758)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:842)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:805)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.createRegionOnFileSystem(HRegionFileSystem.java:871)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4548)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4518)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4491)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4569)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4449)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTestRegion(HBaseTestingUtility.java:3500)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompactionInternals(TestCacheOnWrite.java:429)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction(TestCacheOnWrite.java:485)
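
This OutOfMemoryError is misleadingly named: "unable to create new native thread" means the operating system refused to give the JVM another thread, not that the Java heap is full. RawLocalFileSystem.setPermission shells out through Hadoop's Shell.runCommand, which starts a helper thread to drain the child process's stderr, so once the build slave exhausts its process/thread limit every such call fails the same way. A quick way to check the relevant limits on a Linux build slave would be:

  ulimit -u                          # max user processes; threads count against this on Linux
  cat /proc/sys/kernel/threads-max   # system-wide ceiling on threads
  ps -eLf | wc -l                    # rough count of threads currently alive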

testStoreFileCacheOnWrite[106](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.092 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.fs.FilterFileSystem.create(FilterFileSystem.java:174)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter.createOutputStream(AbstractHFileWriter.java:266)
	at org.apache.hadoop.hbase.io.hfile.HFile$WriterFactory.create(HFile.java:302)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:755)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:706)
	at org.apache.hadoop.hbase.regionserver.StoreFile$WriterBuilder.build(StoreFile.java:644)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.writeStoreFile(TestCacheOnWrite.java:384)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWriteInternals(TestCacheOnWrite.java:262)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWrite(TestCacheOnWrite.java:479)

testNotCachingDataBlocksDuringCompaction[107](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.087 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoFileContent(HRegionFileSystem.java:758)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:842)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.writeRegionInfoOnFilesystem(HRegionFileSystem.java:805)
	at org.apache.hadoop.hbase.regionserver.HRegionFileSystem.createRegionOnFileSystem(HRegionFileSystem.java:871)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4548)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4518)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4491)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4569)
	at org.apache.hadoop.hbase.regionserver.HRegion.createHRegion(HRegion.java:4449)
	at org.apache.hadoop.hbase.HBaseTestingUtility.createTestRegion(HBaseTestingUtility.java:3500)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompactionInternals(TestCacheOnWrite.java:429)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction(TestCacheOnWrite.java:485)

testStoreFileCacheOnWrite[107](org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite)  Time elapsed: 0.091 sec  <<< ERROR!
java.lang.OutOfMemoryError: unable to create new native thread
	at java.lang.Thread.start0(Native Method)
	at java.lang.Thread.start(Thread.java:693)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:443)
	at org.apache.hadoop.util.Shell.run(Shell.java:379)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:589)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:678)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:661)
	at org.apache.hadoop.fs.RawLocalFileSystem.setPermission(RawLocalFileSystem.java:639)
	at org.apache.hadoop.fs.RawLocalFileSystem.create(RawLocalFileSystem.java:305)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:447)
	at org.apache.hadoop.fs.ChecksumFileSystem.create(ChecksumFileSystem.java:424)
	at org.apache.hadoop.fs.FilterFileSystem.create(FilterFileSystem.java:174)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:358)
	at org.apache.hadoop.hbase.util.FSUtils.create(FSUtils.java:333)
	at org.apache.hadoop.hbase.io.hfile.AbstractHFileWriter.createOutputStream(AbstractHFileWriter.java:266)
	at org.apache.hadoop.hbase.io.hfile.HFile$WriterFactory.create(HFile.java:302)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:755)
	at org.apache.hadoop.hbase.regionserver.StoreFile$Writer.<init>(StoreFile.java:706)
	at org.apache.hadoop.hbase.regionserver.StoreFile$WriterBuilder.build(StoreFile.java:644)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.writeStoreFile(TestCacheOnWrite.java:384)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWriteInternals(TestCacheOnWrite.java:262)
	at org.apache.hadoop.hbase.io.hfile.TestCacheOnWrite.testStoreFileCacheOnWrite(TestCacheOnWrite.java:479)

Running org.apache.hadoop.hbase.io.encoding.TestLoadAndSwitchEncodeOnDisk
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.463 sec - in org.apache.hadoop.hbase.io.encoding.TestLoadAndSwitchEncodeOnDisk
Running org.apache.hadoop.hbase.io.encoding.TestChangingEncoding
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 51.298 sec - in org.apache.hadoop.hbase.io.encoding.TestChangingEncoding
Running org.apache.hadoop.hbase.io.encoding.TestEncodedSeekers
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 90.232 sec - in org.apache.hadoop.hbase.io.encoding.TestEncodedSeekers
Running org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders
Tests run: 28, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 191.984 sec - in org.apache.hadoop.hbase.io.encoding.TestDataBlockEncoders
Running org.apache.hadoop.hbase.io.encoding.TestBufferedDataBlockEncoder
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 21.409 sec - in org.apache.hadoop.hbase.io.encoding.TestBufferedDataBlockEncoder
Running org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.766 sec - in org.apache.hadoop.hbase.filter.TestFilterWithScanLimits
Running org.apache.hadoop.hbase.filter.TestFilterWrapper
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.878 sec - in org.apache.hadoop.hbase.filter.TestFilterWrapper
Running org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.136 sec - in org.apache.hadoop.hbase.filter.TestFuzzyRowFilterEndToEnd
Running org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.707 sec - in org.apache.hadoop.hbase.filter.TestColumnRangeFilter
Running org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.692 sec - in org.apache.hadoop.hbase.filter.TestFuzzyRowAndColumnRangeFilter

Results :


Tests in error: 
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testNotCachingDataBlocksDuringCompaction:485->testNotCachingDataBlocksDuringCompactionInternals:429 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory
  TestCacheOnWrite.testStoreFileCacheOnWrite:479->testStoreFileCacheOnWriteInternals:262->writeStoreFile:384 » OutOfMemory


Tests run: 2281, Failures: 0, Errors: 14, Skipped: 23

[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] HBase ............................................. SUCCESS [4.115s]
[INFO] HBase - Checkstyle ................................ SUCCESS [0.686s]
[INFO] HBase - Annotations ............................... SUCCESS [0.829s]
[INFO] HBase - Common .................................... SUCCESS [49.750s]
[INFO] HBase - Protocol .................................. SUCCESS [8.996s]
[INFO] HBase - Client .................................... SUCCESS [52.492s]
[INFO] HBase - Hadoop Compatibility ...................... SUCCESS [6.908s]
[INFO] HBase - Hadoop Two Compatibility .................. SUCCESS [6.214s]
[INFO] HBase - Prefix Tree ............................... SUCCESS [8.068s]
[INFO] HBase - Server .................................... FAILURE [3:43:05.501s]
[INFO] HBase - Testing Util .............................. SKIPPED
[INFO] HBase - Thrift .................................... SKIPPED
[INFO] HBase - Rest ...................................... SKIPPED
[INFO] HBase - Shell ..................................... SKIPPED
[INFO] HBase - Integration Tests ......................... SKIPPED
[INFO] HBase - Examples .................................. SKIPPED
[INFO] HBase - Assembly .................................. SKIPPED
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 3:45:24.409s
[INFO] Finished at: Tue Jun 30 05:20:59 UTC 2015
[INFO] Final Memory: 52M/634M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.18:test (secondPartTestsExecution) on project hbase-server: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hbase-server
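
For example, resuming with a test run from the failing module (the goal here is illustrative; <goals> above stands for whatever goals the build originally invoked):

  mvn test -rf :hbase-server
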
Build step 'Invoke top-level Maven targets' marked build as failure
Performing Post build task...
Match found for :.* : True
Logical operation result is TRUE
Running script  :   ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
  if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
    #It seems sometimes the tests are not dying immediately. Let's give them 10s
    echo "Suspicious java process found - waiting 10s to see if there are just slow to stop"
    sleep 10   
    ZOMBIE_TESTS_COUNT=`jps | grep surefirebooter | wc -l`
    if [[ $ZOMBIE_TESTS_COUNT != 0 ]] ; then
      echo "There are $ZOMBIE_TESTS_COUNT zombie tests, they should have been killed by surefire but survived"
      echo "************ BEGIN zombies jstack extract"
      ZB_STACK=`jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack | grep ".test" | grep "\.java"`
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs -n 1 jstack
      echo "************ END  zombies jstack extract"
      JIRA_COMMENT="$JIRA_COMMENT

     {color:red}-1 core zombie tests{color}.  There are ${ZOMBIE_TESTS_COUNT} zombie test(s): ${ZB_STACK}"
      BAD=1
      jps | grep surefirebooter | cut -d ' ' -f 1 | xargs kill -9
    else
      echo "We're ok: there is no zombie test, but some tests took some time to stop"
    fi
  else
    echo "We're ok: there is no zombie test"
  fi
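
The detection above hinges on the jps | grep surefirebooter | wc -l pipeline, which counts leftover surefire fork JVMs by name. A minimal equivalent, assuming jps is on the Jenkins user's PATH, would be:

  ZOMBIE_TESTS_COUNT=$(jps | grep -c surefirebooter)   # grep -c counts matching lines directly
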
[HBase-0.98] $ /bin/bash -xe /tmp/hudson5968080482288640550.sh
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=1
+ [[ 1 != 0 ]]
+ echo 'Suspicious java process found - waiting 10s to see if they are just slow to stop'
Suspicious java process found - waiting 10s to see if they are just slow to stop
+ sleep 10
++ jps
++ grep surefirebooter
++ wc -l
+ ZOMBIE_TESTS_COUNT=1
+ [[ 1 != 0 ]]
+ echo 'There are 1 zombie tests; they should have been killed by surefire but survived'
There are 1 zombie tests; they should have been killed by surefire but survived
+ echo '************ BEGIN zombies jstack extract'
************ BEGIN zombies jstack extract
++ jps
++ cut -d ' ' -f 1
++ grep .test
++ grep surefirebooter
++ grep '\.java'
++ xargs -n 1 jstack
539: Unable to open socket file: target process not responding or HotSpot VM not loaded
The -F option can be used when the target process is not responding
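
Following that hint, a forced dump of the unresponsive fork could be attempted with the pid reported above:

  jstack -F 539   # -F forces a thread dump when the JVM does not respond to the attach mechanism
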
+ ZB_STACK=
POST BUILD TASK : FAILURE
END OF POST BUILD TASK : 0
Archiving artifacts
Sending artifact delta relative to HBase-0.98 #1041
Archived 1777 artifacts
Archive block size is 32768
Received 20 blocks and 295220847 bytes
Compression is 0.2%
Took 2 min 16 sec
Recording test results
Updating HBASE-13959

Jenkins build is back to normal : HBase-0.98 #1044

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/HBase-0.98/1044/changes>