Posted to hdfs-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2016/05/12 12:47:28 UTC

Build failed in Jenkins: Hadoop-Hdfs-trunk #3131

See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3131/changes>

Changes:

[rohithsharmaks] YARN-5068. Expose scheduler queue to application master. (Harish

------------------------------------------
[...truncated 8535 lines...]
	at org.apache.hadoop.hdfs.DFSStripedOutputStream.checkStreamerFailures(DFSStripedOutputStream.java:631)
	at org.apache.hadoop.hdfs.DFSStripedOutputStream.writeChunk(DFSStripedOutputStream.java:547)
	at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:217)
	at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:164)
	at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:145)
	at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:79)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:48)
	at java.io.DataOutputStream.write(DataOutputStream.java:88)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:441)
	... 13 more

	at org.junit.Assert.fail(Assert.java:88)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:327)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.run(TestDFSStripedOutputStreamWithFailure.java:527)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.test1(TestDFSStripedOutputStreamWithFailure.java:531)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)

Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 35.186 sec - in org.apache.hadoop.hdfs.TestSafeModeWithStripedFile
Running org.apache.hadoop.hdfs.TestBlockStoragePolicy
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 31.585 sec - in org.apache.hadoop.hdfs.TestCrcCorruption
Running org.apache.hadoop.hdfs.TestDatanodeDeath
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 61.825 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure170
Running org.apache.hadoop.hdfs.TestParallelReadUtil
Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.081 sec - in org.apache.hadoop.hdfs.TestParallelReadUtil
Running org.apache.hadoop.hdfs.TestDFSUpgrade
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.7 sec - in org.apache.hadoop.hdfs.TestDFSUpgrade
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.044 sec - in org.apache.hadoop.hdfs.TestBlockStoragePolicy
Running org.apache.hadoop.hdfs.TestDFSShell
Running org.apache.hadoop.hdfs.TestFileAppend2
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 71.905 sec - in org.apache.hadoop.hdfs.TestDatanodeDeath
Running org.apache.hadoop.hdfs.TestKeyProviderCache
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.37 sec - in org.apache.hadoop.hdfs.TestKeyProviderCache
Running org.apache.hadoop.hdfs.TestListFilesInDFS
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 45.655 sec - in org.apache.hadoop.hdfs.TestFileAppend2
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure160
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.939 sec - in org.apache.hadoop.hdfs.TestListFilesInDFS
Running org.apache.hadoop.hdfs.TestAppendSnapshotTruncate
Tests run: 43, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 60.114 sec - in org.apache.hadoop.hdfs.TestDFSShell
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Tests run: 44, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 134.063 sec - in org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Running org.apache.hadoop.hdfs.TestDFSOutputStream
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.864 sec - in org.apache.hadoop.hdfs.TestAppendSnapshotTruncate
Running org.apache.hadoop.hdfs.TestHDFSServerPorts
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.724 sec - in org.apache.hadoop.hdfs.TestDFSOutputStream
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.661 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Running org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.811 sec - in org.apache.hadoop.hdfs.TestHDFSServerPorts
Running org.apache.hadoop.hdfs.TestDFSPermission
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.194 sec - in org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
Running org.apache.hadoop.hdfs.TestRestartDFS
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.112 sec - in org.apache.hadoop.hdfs.TestRestartDFS
Running org.apache.hadoop.cli.TestCryptoAdminCLI
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.384 sec - in org.apache.hadoop.hdfs.TestDFSPermission
Running org.apache.hadoop.cli.TestHDFSCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.902 sec - in org.apache.hadoop.cli.TestCryptoAdminCLI
Running org.apache.hadoop.cli.TestAclCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.508 sec - in org.apache.hadoop.cli.TestAclCLI
Running org.apache.hadoop.cli.TestDeleteCLI
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 123.812 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure160
Running org.apache.hadoop.cli.TestCacheAdminCLI
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 119.521 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.852 sec - in org.apache.hadoop.cli.TestDeleteCLI
Running org.apache.hadoop.cli.TestErasureCodingCLI
Running org.apache.hadoop.cli.TestXAttrCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.056 sec - in org.apache.hadoop.cli.TestCacheAdminCLI
Running org.apache.hadoop.TestRefreshCallQueue
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.258 sec - in org.apache.hadoop.TestRefreshCallQueue
Running org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.117 sec - in org.apache.hadoop.cli.TestXAttrCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.551 sec - in org.apache.hadoop.cli.TestErasureCodingCLI
Running org.apache.hadoop.security.TestPermission
Running org.apache.hadoop.security.TestRefreshUserMappings
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.047 sec - in org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.238 sec - in org.apache.hadoop.security.TestRefreshUserMappings
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.56 sec - in org.apache.hadoop.security.TestPermission
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 78.897 sec - in org.apache.hadoop.cli.TestHDFSCLI

Results :

Failed tests: 
  TestWebHdfsTimeouts.testAuthUrlReadTimeout:198 Expected to find 'localhost:40754: Read timed out' but got unexpected exception:java.net.SocketTimeoutException: localhost:40754: null
	at java.net.SocksSocketImpl.remainingMillis(SocksSocketImpl.java:111)
	at java.net.SocksSocketImpl.connect(SocksSocketImpl.java:392)
	at java.net.Socket.connect(Socket.java:579)
	at sun.net.NetworkClient.doConnect(NetworkClient.java:175)
	at sun.net.www.http.HttpClient.openServer(HttpClient.java:432)
	at sun.net.www.http.HttpClient.openServer(HttpClient.java:527)
	at sun.net.www.http.HttpClient.<init>(HttpClient.java:211)
	at sun.net.www.http.HttpClient.New(HttpClient.java:308)
	at sun.net.www.http.HttpClient.New(HttpClient.java:326)
	at sun.net.www.protocol.http.HttpURLConnection.getNewHttpClient(HttpURLConnection.java:996)
	at sun.net.www.protocol.http.HttpURLConnection.plainConnect(HttpURLConnection.java:932)
	at sun.net.www.protocol.http.HttpURLConnection.connect(HttpURLConnection.java:850)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:700)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.connect(WebHdfsFileSystem.java:653)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.runWithRetry(WebHdfsFileSystem.java:725)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.access$100(WebHdfsFileSystem.java:571)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner$1.run(WebHdfsFileSystem.java:602)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem$AbstractRunner.run(WebHdfsFileSystem.java:598)
	at org.apache.hadoop.hdfs.web.WebHdfsFileSystem.getDelegationToken(WebHdfsFileSystem.java:1506)
	at org.apache.hadoop.hdfs.web.TestWebHdfsTimeouts.testAuthUrlReadTimeout(TestWebHdfsTimeouts.java:195)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)

  TestShortCircuitCache.testDataXceiverCleansUpSlotsOnFailure:682->checkNumberOfSegmentsAndSlots:628 expected:<1> but was:<2>
  TestNameNodeMetadataConsistency.testGenerationStampInFuture:113 expected:<17> but was:<0>
  TestScrLazyPersistFiles.testScrBlockFileCorruption:203->doShortCircuitReadBlockFileCorruptionTest:227->LazyPersistTestCase.ensureFileReplicasOnStorageType:141 
Expected: is <DISK>
     but: was <RAM_DISK>
  TestDFSStripedOutputStreamWithFailure190>TestDFSStripedOutputStreamWithFailure$TestBase.test1:531->TestDFSStripedOutputStreamWithFailure$TestBase.run:527 failed, dn=1, length=4128768 java.io.IOException: Failed at i=3538943
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:443)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:419)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.runTest(TestDFSStripedOutputStreamWithFailure.java:322)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.run(TestDFSStripedOutputStreamWithFailure.java:527)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure$TestBase.test1(TestDFSStripedOutputStreamWithFailure.java:531)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
	at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
	at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
	at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
	at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
Caused by: java.io.IOException: Data streamers failed while creating new block streams: [#1: failed, blk_-9223372036854775759_1004, #3: failed, blk_-9223372036854775757_1004, #4: failed, blk_-9223372036854775756_1004, #7: failed, blk_-9223372036854775753_1004, #6: failed, blk_-9223372036854775754_1004, #5: failed, blk_-9223372036854775755_1004]. There are not enough healthy streamers.
	at org.apache.hadoop.hdfs.DFSStripedOutputStream.checkStreamerFailures(DFSStripedOutputStream.java:631)
	at org.apache.hadoop.hdfs.DFSStripedOutputStream.writeChunk(DFSStripedOutputStream.java:547)
	at org.apache.hadoop.fs.FSOutputSummer.writeChecksumChunks(FSOutputSummer.java:217)
	at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:164)
	at org.apache.hadoop.fs.FSOutputSummer.flushBuffer(FSOutputSummer.java:145)
	at org.apache.hadoop.fs.FSOutputSummer.write(FSOutputSummer.java:79)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.write(FSDataOutputStream.java:48)
	at java.io.DataOutputStream.write(DataOutputStream.java:88)
	at org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure.write(TestDFSStripedOutputStreamWithFailure.java:441)
	... 13 more
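
As a rough sanity check on the "not enough healthy streamers" message above: assuming the striped-output tests use the RS(6,3) layout (6 data plus 3 parity streamers per block group; that layout is an assumption here, not something stated in this log), a write can tolerate at most 3 failed streamers. The trace reports 6 failed streamers (#1, #3, #4, #5, #6, #7), so the stream has to give up. A minimal, purely illustrative Java sketch of that arithmetic (all names below are made up for the sketch and do not come from the Hadoop code base):

    public class StripedStreamerSketch {
      public static void main(String[] args) {
        int dataStreamers = 6;                                    // assumed RS(6,3) layout
        int parityStreamers = 3;
        int totalStreamers = dataStreamers + parityStreamers;     // 9 streamers per block group
        int failedStreamers = 6;                                   // streamers #1, #3, #4, #5, #6, #7 above
        int healthyStreamers = totalStreamers - failedStreamers;   // 3 remain
        // Writing needs at least as many healthy streamers as there are data blocks.
        System.out.println(healthyStreamers >= dataStreamers
            ? "enough healthy streamers" : "not enough healthy streamers");
      }
    }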


Tests in error: 
  TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS:146->createDirectoriesSecurely:192 » IO
  TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI:289->internalTestConcurrentAsyncAPI:328 » 
  TestAsyncDFSRename.testConservativeConcurrentAsyncAPI:284->internalTestConcurrentAsyncAPI:312 » IO
  TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite:199->internalTestConcurrentAsyncRenameWithOverwrite:225->Object.wait:-2 » 
  TestAsyncDFSRename.testCallGetReturnValueMultipleTimes:133 » IO Cannot remove ...
  TestFileAppend.testMultipleAppends » IO Failed to replace a bad datanode on th...
  TestDFSUpgradeFromImage.testUpgradeFromRel1BBWImage:628->upgradeAndVerify:606->verifyFileSystem:229->verifyDir:214->dfsOpenFileWithRetries:178 » IO

Tests run: 4400, Failures: 5, Errors: 7, Skipped: 17
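
The TestWebHdfsTimeouts failure at the top of this results block boils down to a message check: the test expected a SocketTimeoutException whose message contains 'Read timed out', but the exception it caught carried a null message. A minimal sketch of that kind of assertion (illustrative only; assertMessageContains below is a made-up name, not the Hadoop test utility):

    import java.net.SocketTimeoutException;

    public class ExceptionMessageSketch {
      // Fails when the caught exception's message is null or lacks the expected substring.
      static void assertMessageContains(String expected, Throwable t) {
        String msg = t.getMessage();
        if (msg == null || !msg.contains(expected)) {
          throw new AssertionError("Expected to find '" + expected
              + "' but got unexpected exception: " + t);
        }
      }

      public static void main(String[] args) {
        Throwable caught = new SocketTimeoutException();   // message is null here
        try {
          assertMessageContains("Read timed out", caught);
        } catch (AssertionError e) {
          System.out.println(e.getMessage());              // mirrors the failure text above
        }
      }
    }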

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:06 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:17 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.116 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:22 h
[INFO] Finished at: 2016-05-12T12:47:25+00:00
[INFO] Final Memory: 71M/900M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results

---------------------------------------------------------------------
To unsubscribe, e-mail: hdfs-dev-unsubscribe@hadoop.apache.org
For additional commands, e-mail: hdfs-dev-help@hadoop.apache.org


Jenkins build is back to normal : Hadoop-Hdfs-trunk #3137

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3137/changes>


Build failed in Jenkins: Hadoop-Hdfs-trunk #3136

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3136/changes>

Changes:

[wang] Revert "Update project version to 3.0.0-alpha1-SNAPSHOT."

[lei] HDFS-9389. Add maintenance states to AdminStates. (Ming Ma via lei)

------------------------------------------
[...truncated 6202 lines...]
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 5 on 48072" daemon prio=5 tid=152 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"VolumeScannerThread(<https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/test/data/1/dfs/data/data6)"> daemon prio=5 tid=162 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:613)
"CacheReplicationMonitor(979110695)"  prio=5 tid=6291 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2176)
        at org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor.run(CacheReplicationMonitor.java:182)
"IPC Server listener on 48072" daemon prio=5 tid=139 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener.run(Server.java:901)
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 72.568 sec - in org.apache.hadoop.hdfs.TestAclsEndToEnd
Running org.apache.hadoop.hdfs.TestGetFileChecksum
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.033 sec - in org.apache.hadoop.hdfs.TestCrcCorruption
Running org.apache.hadoop.hdfs.TestLeaseRecovery2
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.324 sec - in org.apache.hadoop.hdfs.TestGetFileChecksum
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure000
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 35.895 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure000
Running org.apache.hadoop.hdfs.TestWriteConfigurationToDFS
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 105.326 sec - in org.apache.hadoop.hdfs.TestPread
Running org.apache.hadoop.hdfs.TestRollingUpgrade
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.763 sec - in org.apache.hadoop.hdfs.TestWriteConfigurationToDFS
Running org.apache.hadoop.hdfs.TestReservedRawPaths
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.582 sec - in org.apache.hadoop.hdfs.TestReservedRawPaths
Running org.apache.hadoop.hdfs.TestListFilesInDFS
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.349 sec - in org.apache.hadoop.hdfs.TestListFilesInDFS
Running org.apache.hadoop.hdfs.TestParallelShortCircuitReadUnCached
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 74.888 sec - in org.apache.hadoop.hdfs.TestLeaseRecovery2
Running org.apache.hadoop.hdfs.tools.TestDFSHAAdminMiniCluster
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.091 sec - in org.apache.hadoop.hdfs.tools.TestDFSHAAdminMiniCluster
Running org.apache.hadoop.hdfs.tools.TestGetConf
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.039 sec - in org.apache.hadoop.hdfs.tools.TestGetConf
Running org.apache.hadoop.hdfs.tools.TestStoragePolicyCommands
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.447 sec - in org.apache.hadoop.hdfs.tools.TestStoragePolicyCommands
Running org.apache.hadoop.hdfs.tools.TestDFSZKFailoverController
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.062 sec - in org.apache.hadoop.hdfs.tools.TestDFSZKFailoverController
Running org.apache.hadoop.hdfs.tools.TestDFSAdmin
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 43.987 sec - in org.apache.hadoop.hdfs.TestParallelShortCircuitReadUnCached
Running org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForXAttr
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.417 sec - in org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForXAttr
Running org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerWithStripedBlocks
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.228 sec - in org.apache.hadoop.hdfs.tools.TestDFSAdmin
Running org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.852 sec - in org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewer
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.732 sec - in org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerWithStripedBlocks
Running org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForContentSummary
Running org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForAcl
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.048 sec - in org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForContentSummary
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.691 sec - in org.apache.hadoop.hdfs.tools.offlineImageViewer.TestOfflineImageViewerForAcl
Running org.apache.hadoop.hdfs.tools.TestDFSHAAdmin
Running org.apache.hadoop.hdfs.tools.TestDelegationTokenFetcher
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.044 sec - in org.apache.hadoop.hdfs.tools.TestDFSHAAdmin
Running org.apache.hadoop.hdfs.tools.TestDebugAdmin
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.701 sec - in org.apache.hadoop.hdfs.tools.TestDelegationTokenFetcher
Running org.apache.hadoop.hdfs.tools.TestGetGroups
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 105.091 sec - in org.apache.hadoop.hdfs.TestRollingUpgrade
Running org.apache.hadoop.hdfs.tools.TestDFSAdminWithHA
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.241 sec - in org.apache.hadoop.hdfs.tools.TestDebugAdmin
Running org.apache.hadoop.hdfs.tools.offlineEditsViewer.TestOfflineEditsViewer
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.983 sec - in org.apache.hadoop.hdfs.tools.TestGetGroups
Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.812 sec - in org.apache.hadoop.hdfs.tools.TestDFSAdminWithHA
Running org.apache.hadoop.hdfs.TestClose
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.29 sec - in org.apache.hadoop.hdfs.tools.offlineEditsViewer.TestOfflineEditsViewer
Running org.apache.hadoop.hdfs.TestFetchImage
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.336 sec - in org.apache.hadoop.hdfs.TestClose
Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.353 sec - in org.apache.hadoop.hdfs.TestFetchImage
Running org.apache.hadoop.hdfs.TestBlocksScheduledCounter
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.051 sec - in org.apache.hadoop.hdfs.TestBlocksScheduledCounter
Running org.apache.hadoop.hdfs.protocolPB.TestPBHelper
Tests run: 30, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.933 sec - in org.apache.hadoop.hdfs.protocolPB.TestPBHelper
Running org.apache.hadoop.hdfs.TestFileAppend
Tests run: 7, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 258.733 sec <<< FAILURE! - in org.apache.hadoop.hdfs.TestAsyncDFSRename
testAggressiveConcurrentAsyncAPI(org.apache.hadoop.hdfs.TestAsyncDFSRename)  Time elapsed: 60.012 sec  <<< ERROR!
java.lang.Exception: test timed out after 60000 milliseconds
	at java.lang.Thread.sleep(Native Method)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2472)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2512)
	at org.apache.hadoop.hdfs.MiniDFSCluster.restartNameNodes(MiniDFSCluster.java:1977)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncAPI(TestAsyncDFSRename.java:395)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI(TestAsyncDFSRename.java:289)

"pool
Running org.apache.hadoop.hdfs.TestRollingUpgradeRollback
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.339 sec - in org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
Running org.apache.hadoop.hdfs.TestLease
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.745 sec - in org.apache.hadoop.hdfs.TestRollingUpgradeRollback
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.624 sec - in org.apache.hadoop.hdfs.TestLease
Running org.apache.hadoop.net.TestNetworkTopology
Running org.apache.hadoop.TestGenericRefresh
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.096 sec - in org.apache.hadoop.TestGenericRefresh
Running org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.116 sec - in org.apache.hadoop.net.TestNetworkTopology
Running org.apache.hadoop.tracing.TestTraceAdmin
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.581 sec - in org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Running org.apache.hadoop.tracing.TestTracing
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.644 sec - in org.apache.hadoop.tracing.TestTraceAdmin
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.355 sec - in org.apache.hadoop.hdfs.TestFileAppend
Running org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithHdfs
Running org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.725 sec - in org.apache.hadoop.tracing.TestTracing
Running org.apache.hadoop.cli.TestDeleteCLI
Tests run: 44, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 88.073 sec - in org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Running org.apache.hadoop.cli.TestHDFSCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.383 sec - in org.apache.hadoop.cli.TestDeleteCLI
Running org.apache.hadoop.cli.TestAclCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.105 sec - in org.apache.hadoop.cli.TestAclCLI
Running org.apache.hadoop.cli.TestErasureCodingCLI
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 29.486 sec - in org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs
Running org.apache.hadoop.cli.TestCacheAdminCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.018 sec - in org.apache.hadoop.cli.TestErasureCodingCLI
Running org.apache.hadoop.cli.TestXAttrCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.074 sec - in org.apache.hadoop.cli.TestCacheAdminCLI
Running org.apache.hadoop.cli.TestCryptoAdminCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.439 sec - in org.apache.hadoop.cli.TestXAttrCLI
Running org.apache.hadoop.security.TestPermission
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.091 sec - in org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithHdfs
Running org.apache.hadoop.security.TestRefreshUserMappings
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.815 sec - in org.apache.hadoop.cli.TestCryptoAdminCLI
Running org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.467 sec - in org.apache.hadoop.security.TestPermission
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.978 sec - in org.apache.hadoop.security.TestRefreshUserMappings
Running org.apache.hadoop.tools.TestTools
Running org.apache.hadoop.tools.TestHdfsConfigFields
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.068 sec - in org.apache.hadoop.tools.TestHdfsConfigFields
Running org.apache.hadoop.tools.TestJMXGet
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.272 sec - in org.apache.hadoop.tools.TestTools
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.739 sec - in org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.872 sec - in org.apache.hadoop.tools.TestJMXGet
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.425 sec - in org.apache.hadoop.cli.TestHDFSCLI

Results :

Failed tests: 
  TestEditLog.testBatchedSyncWithClosedLogs:594 logging edit without syncing should do not affect txid expected:<1> but was:<2>

Tests in error: 
  TestNameNodeRespectsBindHostKeys.testServiceRpcBindHostKey:144 » Bind Problem ...
  TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI:289->internalTestConcurrentAsyncAPI:395 » 

Tests run: 4418, Failures: 1, Errors: 2, Skipped: 17
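
The "Bind Problem" entry for TestNameNodeRespectsBindHostKeys above (the full BindException stack trace appears in the "Still Failing" summary further down) is the classic "Address already in use" race: two processes, or two test runs, try to bind the same fixed port. A minimal Java sketch of the collision, and of the usual mitigation of binding port 0 so the OS picks a free ephemeral port (illustrative only; it does not reproduce the NameNode RPC server setup):

    import java.net.BindException;
    import java.net.InetSocketAddress;
    import java.net.ServerSocket;

    public class BindPortSketch {
      public static void main(String[] args) throws Exception {
        try (ServerSocket first = new ServerSocket()) {
          first.bind(new InetSocketAddress("0.0.0.0", 49908));    // same fixed port as the failing run
          try (ServerSocket second = new ServerSocket()) {
            second.bind(new InetSocketAddress("0.0.0.0", 49908)); // second bind on the same port
          } catch (BindException e) {
            System.out.println("Collision on the fixed port: " + e.getMessage());
          }
          try (ServerSocket ephemeral = new ServerSocket()) {
            ephemeral.bind(new InetSocketAddress("0.0.0.0", 0));  // port 0: let the OS choose
            System.out.println("OS-assigned free port: " + ephemeral.getLocalPort());
          }
        }
      }
    }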

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:06 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:00 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.100 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:04 h
[INFO] Finished at: 2016-05-13T01:12:36+00:00
[INFO] Final Memory: 57M/730M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results

Hadoop-Hdfs-trunk - Build # 3136 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-trunk/3136/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 6395 lines...]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:06 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:00 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.100 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:04 h
[INFO] Finished at: 2016-05-13T01:12:36+00:00
[INFO] Final Memory: 57M/730M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
FAILED:  org.apache.hadoop.hdfs.server.namenode.TestEditLog.testBatchedSyncWithClosedLogs[1]

Error Message:
logging edit without syncing should do not affect txid expected:<1> but was:<2>

Stack Trace:
java.lang.AssertionError: logging edit without syncing should do not affect txid expected:<1> but was:<2>
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotEquals(Assert.java:743)
	at org.junit.Assert.assertEquals(Assert.java:118)
	at org.junit.Assert.assertEquals(Assert.java:555)
	at org.apache.hadoop.hdfs.server.namenode.TestEditLog.testBatchedSyncWithClosedLogs(TestEditLog.java:594)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestNameNodeRespectsBindHostKeys.testServiceRpcBindHostKey

Error Message:
Problem binding to [0.0.0.0:49908] java.net.BindException: Address already in use; For more details see:  http://wiki.apache.org/hadoop/BindException

Stack Trace:
java.net.BindException: Problem binding to [0.0.0.0:49908] java.net.BindException: Address already in use; For more details see:  http://wiki.apache.org/hadoop/BindException
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.apache.hadoop.ipc.Server.bind(Server.java:530)
	at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:793)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:2592)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:958)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server.<init>(ProtobufRpcEngine.java:563)
	at org.apache.hadoop.ipc.ProtobufRpcEngine.getServer(ProtobufRpcEngine.java:538)
	at org.apache.hadoop.ipc.RPC$Builder.build(RPC.java:800)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:332)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:783)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:710)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:924)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:903)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1620)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.namenode.TestNameNodeRespectsBindHostKeys.testServiceRpcBindHostKey(TestNameNodeRespectsBindHostKeys.java:144)


FAILED:  org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
	at java.lang.Thread.sleep(Native Method)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2472)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2512)
	at org.apache.hadoop.hdfs.MiniDFSCluster.restartNameNodes(MiniDFSCluster.java:1977)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncAPI(TestAsyncDFSRename.java:395)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI(TestAsyncDFSRename.java:289)
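
The TestAsyncDFSRename entry above is a plain JUnit timeout: the test sat in MiniDFSCluster.waitActive for longer than its 60-second budget. waitActive is essentially a bounded poll-and-sleep loop; a minimal, illustrative sketch of that pattern follows (the Probe interface and isClusterUp name are made up for this sketch):

    public class BoundedWaitSketch {
      interface Probe { boolean isClusterUp(); }

      // Poll until the probe reports the cluster is up, or the deadline passes.
      static void waitActive(Probe probe, long timeoutMs) throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (!probe.isClusterUp()) {
          if (System.currentTimeMillis() > deadline) {
            throw new IllegalStateException("Timed out waiting for Mini HDFS Cluster to start");
          }
          Thread.sleep(100);   // the sleep visible at the top of the stack trace above
        }
      }

      public static void main(String[] args) throws InterruptedException {
        long start = System.currentTimeMillis();
        // Probe that turns true after about half a second, so this run succeeds.
        waitActive(() -> System.currentTimeMillis() - start > 500, 60000L);
        System.out.println("cluster reported active");
      }
    }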




Build failed in Jenkins: Hadoop-Hdfs-trunk #3135

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3135/changes>

Changes:

[jlowe] YARN-5053. More informative diagnostics when applications killed by a

[aw] HADOOP-12581. ShellBasedIdMapping needs suport for Solaris (Alan

------------------------------------------
[...truncated 28044 lines...]
  TestDistributedFileSystem.testAllWithNoXmlDefaults:1021->testDFSClose:177 » IO
  TestPread.testMaxOutHedgedReadPool:381 » IO Timed out waiting for Mini HDFS Cl...
  TestAsyncDFSRename.testAsyncRenameWithOverwrite:69 » IO Timed out waiting for ...
  TestDistributedFileSystem.testRemoteRackOfFirstDegreeReadStatistics:811->testReadFileSystemStatistics:832 » IO
  TestFileAppend2.testSimpleAppend2:233 » IO Timed out waiting for Mini HDFS Clu...
  TestPread.testHedgedReadLoopTooManyTimes:321 » IO Timed out waiting for Mini H...
  TestDistributedFileSystem.testCreateWithCustomChecksum:1103 » IO Timed out wai...
  TestFileAppend2.testSimpleAppend:84 » IO Timed out waiting for Mini HDFS Clust...
  TestCrcCorruption.testCorruptionDuringWrt:97 » IO Timed out waiting for Mini H...
  TestPread.testPreadDFS:256->dfsPreadTest:456 » IO Timed out waiting for Mini H...
  TestDistributedFileSystem.testFileCloseStatus:1143 » IO Timed out waiting for ...
  TestFileAppend2.testAppendLessThanChecksumChunk:553 » IO Timed out waiting for...
  TestCrcCorruption.testCrcCorruption:233->thistest:161 » IO Timed out waiting f...
  TestPread.testPreadDFSSimulated:474->testPreadDFS:256->dfsPreadTest:456 » IO T...
  TestFileAppend2.testComplexAppend:538->testComplexAppend:489 » IO Timed out wa...
  TestGetFileChecksum.setUp:45 » IO Timed out waiting for Mini HDFS Cluster to s...
  TestCrcCorruption.testEntirelyCorruptFileThreeNodes:268->doTestEntirelyCorruptFile:279 » IO
  TestPread.testHedgedPreadDFSBasic:278->dfsPreadTest:456 » IO Timed out waiting...
  TestFileAppend2.testComplexAppend2:543->testComplexAppend:489 » IO Timed out w...
  TestGetFileChecksum.setUp:45 » IO Timed out waiting for Mini HDFS Cluster to s...
  TestCrcCorruption.testEntirelyCorruptFileOneNode:255->doTestEntirelyCorruptFile:279 » IO
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestWriteConfigurationToDFS.testWriteConf:39 » IO Timed out waiting for Mini H...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestListFilesInDFS.testSetUp:42 » IO Timed out waiting for Mini HDFS Cluster t...
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestRollingUpgrade.testDFSAdminDatanodeUpgradeControlCommands:396 » IO Timed o...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestParallelShortCircuitReadUnCached.setupCluster:66->TestParallelReadUtil.setupCluster:71 » IO
  TestParallelShortCircuitReadUnCached.teardownCluster:78->TestParallelReadUtil.teardownCluster:394 » NullPointer
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestRollingUpgrade.testRollback:310 » IO Timed out waiting for Mini HDFS Clust...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestRollingUpgrade.testCheckpointWithSNN:654 » IO Timed out waiting for Mini H...
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestStoragePolicyCommands.clusterSetUp:48 » IO Timed out waiting for Mini HDFS...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestLeaseRecovery2.startUp:98 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestStoragePolicyCommands.clusterSetUp:48 » IO Timed out waiting for Mini HDFS...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestReservedRawPaths.setup:80 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestOfflineImageViewerForXAttr.createOriginalFSImage:74 » IO Timed out waiting...
  TestDFSAdmin.setUp:75->restartCluster:92 » IO Timed out waiting for Mini HDFS ...
  TestOfflineImageViewerWithStripedBlocks.setup:61 » IO Timed out waiting for Mi...
  TestDFSAdmin.setUp:75->restartCluster:92 » IO Timed out waiting for Mini HDFS ...
  TestOfflineImageViewer.createOriginalFSImage:118 » IO Timed out waiting for Mi...
  TestOfflineImageViewerForContentSummary.createOriginalFSImage:68 » IO Timed ou...
  TestOfflineImageViewerForAcl.createOriginalFSImage:102 » IO Timed out waiting ...
  TestDFSAdmin.setUp:75->restartCluster:92 » IO Timed out waiting for Mini HDFS ...
  TestDebugAdmin.setUp:51 » IO Timed out waiting for Mini HDFS Cluster to start
  TestDFSAdmin.setUp:75->restartCluster:92 » IO Timed out waiting for Mini HDFS ...
  TestOfflineEditsViewer.setUp:89 » IO Timed out waiting for Mini HDFS Cluster t...
  TestDebugAdmin.setUp:51 » IO Timed out waiting for Mini HDFS Cluster to start
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestOfflineEditsViewer.setUp:89 » IO Timed out waiting for Mini HDFS Cluster t...
  TestDebugAdmin.setUp:51 » IO Timed out waiting for Mini HDFS Cluster to start
  TestClose.testWriteAfterClose:37 » IO Timed out waiting for Mini HDFS Cluster ...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestOfflineEditsViewer.setUp:89 » IO Timed out waiting for Mini HDFS Cluster t...
  TestFetchImage.testFetchImage:58 » IO Timed out waiting for Mini HDFS Cluster ...
  TestInjectionForSimulatedStorage.testInjection:130 » IO Timed out waiting for ...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestOfflineEditsViewer.setUp:89 » IO Timed out waiting for Mini HDFS Cluster t...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestBlocksScheduledCounter.testBlocksScheduledCounter:56 » IO Timed out waitin...
  TestOfflineEditsViewer.setUp:89 » IO Timed out waiting for Mini HDFS Cluster t...
  TestFileAppend.testSimpleFlush:178 » IO Timed out waiting for Mini HDFS Cluste...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestBlocksScheduledCounter.testScheduledBlocksCounterShouldDecrementOnAbandonBlock:88 » IO
  TestFileAppend.testFailedAppendBlockRejection:514 » IO Timed out waiting for M...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestLease.testLeaseAfterRename:166 » IO Timed out waiting for Mini HDFS Cluste...
  TestFileAppend.testAppendAfterSoftLimit:427 » IO Timed out waiting for Mini HD...
  TestNetworkTopology.testInvalidNetworkTopologiesNotCachedInHdfs:391 » IO Timed...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestLease.testLeaseAfterRenameAndRecreate:263 » IO Timed out waiting for Mini ...
  TestFileAppend.testMultiAppend2:567 » IO Timed out waiting for Mini HDFS Clust...
  TestGenericRefresh.setUpBeforeClass:60 » IO Timed out waiting for Mini HDFS Cl...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestFileAppend.testAppendCorruptedBlock:639 »  test timed out after 10000 mill...
  TestLease.testLease:301 » IO Timed out waiting for Mini HDFS Cluster to start
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestTracingShortCircuitLocalRead.testShortCircuitTraceHooks:81 » IO Timed out ...
  TestFileAppend.testBreakHardlinksIfNeeded:112 » IO Timed out waiting for Mini ...
  TestLease.testLeaseAbort:77 » IO Timed out waiting for Mini HDFS Cluster to st...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestFileAppend.testComplexFlush:231 » IO Timed out waiting for Mini HDFS Clust...
  TestTraceAdmin.testCreateAndDestroySpanReceiver:67 » IO Timed out waiting for ...
  TestTracing.startCluster:215 » IO Timed out waiting for Mini HDFS Cluster to s...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestFileAppend.testMultipleAppends:375 » IO Timed out waiting for Mini HDFS Cl...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestFileAppend.testFileNotFound »  Unexpected exception, expected<java.io.File...
  TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS:140 » IO
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestFileAppend.testAppend2Twice:332 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithSecureHdfs.testWithSecureHDFS:90 » IO Timed out w...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestFileAppend.testAppend2AfterSoftLimit:467 » IO Timed out waiting for Mini H...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestFileAppend.testAppendTwice:293 » IO Timed out waiting for Mini HDFS Cluste...
  TestDeleteCLI.setUp:47 » IO Timed out waiting for Mini HDFS Cluster to start
  TestDeleteCLI.tearDown:68->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:79 » NullPointer
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestAclCLI.setUp:40 » IO Timed out waiting for Mini HDFS Cluster to start
  TestAclCLI.tearDown:49->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:68 » NullPointer
  TestHDFSCLI.setUp:56 » IO Timed out waiting for Mini HDFS Cluster to start
  TestHDFSCLI.tearDown:87->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:93 » NullPointer
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestErasureCodingCLI.setUp:48 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestErasureCodingCLI.tearDown:75->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:81 » NullPointer
  TestCacheAdminCLI.setUp:63 » IO Timed out waiting for Mini HDFS Cluster to sta...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestXAttrCLI.setUp:49 » IO Timed out waiting for Mini HDFS Cluster to start
  TestXAttrCLI.tearDown:77->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:83 » NullPointer
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestCryptoAdminCLI.setUp:72 » IO Timed out waiting for Mini HDFS Cluster to st...
  TestCryptoAdminCLI.tearDown:96->CLITestHelper.tearDown:125->CLITestHelper.displayResults:163->expandCommand:117 » NullPointer
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestPermission.testFilePermission:186 » IO Timed out waiting for Mini HDFS Clu...
  TestRefreshUserMappings.setUp:94 » IO Timed out waiting for Mini HDFS Cluster ...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestRollingFileSystemSinkWithHdfs.setupHdfs:57 » IO Timed out waiting for Mini...
  TestPermission.testCreate:128 » IO Timed out waiting for Mini HDFS Cluster to ...
  TestRefreshUserMappings.setUp:94 » IO Timed out waiting for Mini HDFS Cluster ...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestPermissionSymlinks.beforeClassSetUp:78 » IO Timed out waiting for Mini HDF...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestJMXGet.testDataNode:156 » IO Timed out waiting for Mini HDFS Cluster to st...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestJMXGet.testNameNode:98 » IO Timed out waiting for Mini HDFS Cluster to sta...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...
  TestHDFSFileSystemContract.setUp:39 » IO Timed out waiting for Mini HDFS Clust...

Tests run: 3262, Failures: 34, Errors: 1797, Skipped: 7

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-alpha1-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:08 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:49 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.132 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:53 h
[INFO] Finished at: 2016-05-13T00:04:52+00:00
[INFO] Final Memory: 70M/1012M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results

---------------------------------------------------------------------
To unsubscribe, e-mail: hdfs-dev-unsubscribe@hadoop.apache.org
For additional commands, e-mail: hdfs-dev-help@hadoop.apache.org


Hadoop-Hdfs-trunk - Build # 3134 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-trunk/3134/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 18038 lines...]
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.initCluster(TestAclConfigFlag.java:167)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.testModifyAclEntries(TestAclConfigFlag.java:67)

testEditLog(org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag)  Time elapsed: 12.031 sec  <<< ERROR!
java.io.IOException: Timed out waiting for Mini HDFS Cluster to start
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitClusterUp(MiniDFSCluster.java:1345)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:848)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.initCluster(TestAclConfigFlag.java:167)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.testEditLog(TestAclConfigFlag.java:120)

testGetAclStatus(org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag)  Time elapsed: 11.858 sec  <<< ERROR!
java.io.IOException: Timed out waiting for Mini HDFS Cluster to start
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitClusterUp(MiniDFSCluster.java:1345)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:848)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.initCluster(TestAclConfigFlag.java:167)
	at org.apache.hadoop.hdfs.server.namenode.TestAclConfigFlag.testGetAclStatus(TestAclConfigFlag.java:111)
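
For context, the repeated "Timed out waiting for Mini HDFS Cluster to start" errors all come from the builder path visible in the traces above (MiniDFSCluster$Builder.build -> initMiniDFSCluster -> waitClusterUp). A minimal sketch of the pattern these tests rely on, assuming a default Configuration on a local build tree; the class below and its test body are illustrative only, not code from this build:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class MiniClusterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Builder.build() calls initMiniDFSCluster(), which calls waitClusterUp();
        // that wait is what throws "Timed out waiting for Mini HDFS Cluster to start"
        // on a slow or resource-starved build slave.
        MiniDFSCluster cluster = new MiniDFSCluster.Builder(conf)
            .numDataNodes(1)
            .build();
        try {
          cluster.waitActive();   // proceed only once the NameNode and DataNodes report in
        } finally {
          cluster.shutdown();
        }
      }
    }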

Running org.apache.hadoop.hdfs.server.namenode.TestDiskspaceQuotaUpdate
Running org.apache.hadoop.hdfs.server.namenode.TestQuotaWithStripedBlocks
Slave went offline during the build
ERROR: Connection was broken: java.io.IOException: Unexpected termination of the channel
	at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:50)
Caused by: java.io.EOFException
	at java.io.ObjectInputStream$PeekInputStream.readFully(ObjectInputStream.java:2325)
	at java.io.ObjectInputStream$BlockDataInputStream.readShort(ObjectInputStream.java:2794)
	at java.io.ObjectInputStream.readStreamHeader(ObjectInputStream.java:801)
	at java.io.ObjectInputStream.<init>(ObjectInputStream.java:299)
	at hudson.remoting.ObjectInputStreamEx.<init>(ObjectInputStreamEx.java:48)
	at hudson.remoting.AbstractSynchronousByteArrayCommandTransport.read(AbstractSynchronousByteArrayCommandTransport.java:34)
	at hudson.remoting.SynchronousCommandTransport$ReaderThread.run(SynchronousCommandTransport.java:48)

Build step 'Execute shell' marked build as failure
ERROR: Step 'Archive the artifacts' failed: no workspace for Hadoop-Hdfs-trunk #3134
ERROR: Step 'Publish JUnit test result report' failed: no workspace for Hadoop-Hdfs-trunk #3134
ERROR: Build step failed with exception
java.lang.NullPointerException
	at hudson.plugins.violations.ViolationsPublisher.perform(ViolationsPublisher.java:74)
	at hudson.tasks.BuildStepMonitor$3.perform(BuildStepMonitor.java:45)
	at hudson.model.AbstractBuild$AbstractBuildExecution.perform(AbstractBuild.java:782)
	at hudson.model.AbstractBuild$AbstractBuildExecution.performAllBuildSteps(AbstractBuild.java:723)
	at hudson.model.Build$BuildExecution.post2(Build.java:185)
	at hudson.model.AbstractBuild$AbstractBuildExecution.post(AbstractBuild.java:668)
	at hudson.model.Run.execute(Run.java:1763)
	at hudson.model.FreeStyleBuild.run(FreeStyleBuild.java:43)
	at hudson.model.ResourceController.execute(ResourceController.java:98)
	at hudson.model.Executor.run(Executor.java:410)
Build step 'Report Violations' marked build as failure
ERROR: Step 'E-mail Notification' failed: no workspace for Hadoop-Hdfs-trunk #3134
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
ERROR: H9 is offline; cannot locate JDK 1.7 (latest)
ERROR: H9 is offline; cannot locate JDK 1.7 (latest)




###################################################################################
############################## FAILED TESTS (if any) ##############################
No tests ran.

Build failed in Jenkins: Hadoop-Hdfs-trunk #3133

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3133/changes>

Changes:

[stevel] HADOOP-13116 Jets3tNativeS3FileSystemContractTest does not run.

------------------------------------------
[...truncated 8198 lines...]
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.531 sec - in org.apache.hadoop.hdfs.util.TestLightWeightHashSet
Running org.apache.hadoop.hdfs.util.TestCyclicIteration
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.153 sec - in org.apache.hadoop.hdfs.util.TestCyclicIteration
Running org.apache.hadoop.hdfs.util.TestDiff
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.468 sec - in org.apache.hadoop.hdfs.util.TestDiff
Running org.apache.hadoop.hdfs.TestDFSStartupVersions
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 33.91 sec - in org.apache.hadoop.hdfs.TestFileConcurrentReader
Running org.apache.hadoop.hdfs.TestWriteBlockGetsBlockLengthHint
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.568 sec - in org.apache.hadoop.hdfs.TestWriteBlockGetsBlockLengthHint
Running org.apache.hadoop.hdfs.TestReservedRawPaths
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 25.539 sec - in org.apache.hadoop.hdfs.TestDFSStartupVersions
Running org.apache.hadoop.hdfs.TestRead
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 107.868 sec - in org.apache.hadoop.hdfs.TestDFSStorageStateRecovery
Running org.apache.hadoop.hdfs.TestAppendDifferentChecksum
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.832 sec - in org.apache.hadoop.hdfs.TestReservedRawPaths
Running org.apache.hadoop.hdfs.TestDFSRollback
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.039 sec - in org.apache.hadoop.hdfs.TestRead
Running org.apache.hadoop.hdfs.TestMiniDFSCluster
Tests run: 3, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 14.371 sec - in org.apache.hadoop.hdfs.TestAppendDifferentChecksum
Running org.apache.hadoop.hdfs.TestClientReportBadBlock
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.044 sec - in org.apache.hadoop.hdfs.TestMiniDFSCluster
Running org.apache.hadoop.hdfs.TestFileStatusWithECPolicy
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.737 sec - in org.apache.hadoop.hdfs.TestClientReportBadBlock
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.181 sec - in org.apache.hadoop.hdfs.TestDFSRollback
Running org.apache.hadoop.hdfs.TestApplyingStoragePolicy
Running org.apache.hadoop.hdfs.TestReadStripedFileWithMissingBlocks
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.178 sec - in org.apache.hadoop.hdfs.TestFileStatusWithECPolicy
Running org.apache.hadoop.hdfs.TestLeaseRecovery2
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.068 sec - in org.apache.hadoop.hdfs.TestApplyingStoragePolicy
Running org.apache.hadoop.hdfs.protocol.TestLayoutVersion
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.511 sec - in org.apache.hadoop.hdfs.protocol.TestLayoutVersion
Running org.apache.hadoop.hdfs.protocol.datatransfer.TestPacketReceiver
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.81 sec - in org.apache.hadoop.hdfs.protocol.datatransfer.TestPacketReceiver
Running org.apache.hadoop.hdfs.protocol.datatransfer.sasl.TestSaslDataTransfer
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 128.263 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure180
Running org.apache.hadoop.hdfs.protocol.TestAnnotations
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.31 sec - in org.apache.hadoop.hdfs.protocol.TestAnnotations
Running org.apache.hadoop.hdfs.protocol.TestLocatedBlock
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.588 sec - in org.apache.hadoop.hdfs.protocol.TestLocatedBlock
Running org.apache.hadoop.hdfs.protocol.TestBlockListAsLongs
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.933 sec - in org.apache.hadoop.hdfs.protocol.TestBlockListAsLongs
Running org.apache.hadoop.hdfs.TestAbandonBlock
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.272 sec - in org.apache.hadoop.hdfs.TestAbandonBlock
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure190
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 64.459 sec - in org.apache.hadoop.hdfs.TestReadStripedFileWithMissingBlocks
Running org.apache.hadoop.hdfs.crypto.TestHdfsCryptoStreams
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.506 sec - in org.apache.hadoop.hdfs.protocol.datatransfer.sasl.TestSaslDataTransfer
Running org.apache.hadoop.hdfs.TestFileAppendRestart
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.574 sec - in org.apache.hadoop.hdfs.TestFileAppendRestart
Running org.apache.hadoop.hdfs.TestFetchImage
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.356 sec - in org.apache.hadoop.hdfs.TestFetchImage
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure200
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 48.509 sec - in org.apache.hadoop.hdfs.crypto.TestHdfsCryptoStreams
Running org.apache.hadoop.hdfs.TestSafeModeWithStripedFile
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 83.744 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure190
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure170
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 46.337 sec - in org.apache.hadoop.hdfs.TestSafeModeWithStripedFile
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure010
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 87.535 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure200
Running org.apache.hadoop.hdfs.TestFileCreationDelete
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 22.201 sec - in org.apache.hadoop.hdfs.TestFileCreationDelete
Running org.apache.hadoop.hdfs.TestCrcCorruption
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 85.407 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure170
Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.796 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure010
Running org.apache.hadoop.hdfs.TestBlockStoragePolicy
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 36.392 sec - in org.apache.hadoop.hdfs.TestCrcCorruption
Running org.apache.hadoop.hdfs.TestDatanodeDeath
Tests run: 20, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 55.147 sec - in org.apache.hadoop.hdfs.TestBlockStoragePolicy
Running org.apache.hadoop.hdfs.TestParallelReadUtil
Tests run: 1, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.056 sec - in org.apache.hadoop.hdfs.TestParallelReadUtil
Running org.apache.hadoop.hdfs.TestDFSUpgrade
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 26.877 sec - in org.apache.hadoop.hdfs.TestDFSUpgrade
Running org.apache.hadoop.hdfs.TestDFSShell
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 77.469 sec - in org.apache.hadoop.hdfs.TestDatanodeDeath
Running org.apache.hadoop.hdfs.TestFileAppend2
Tests run: 44, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 139.668 sec - in org.apache.hadoop.hdfs.TestHDFSFileSystemContract
Running org.apache.hadoop.hdfs.TestKeyProviderCache
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.635 sec - in org.apache.hadoop.hdfs.TestKeyProviderCache
Running org.apache.hadoop.hdfs.TestListFilesInDFS
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.313 sec - in org.apache.hadoop.hdfs.TestListFilesInDFS
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure160
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.81 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure160
Running org.apache.hadoop.hdfs.TestAppendSnapshotTruncate
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 57.252 sec - in org.apache.hadoop.hdfs.TestFileAppend2
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.952 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Running org.apache.hadoop.hdfs.TestDFSOutputStream
Tests run: 43, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 81.64 sec <<< FAILURE! - in org.apache.hadoop.hdfs.TestDFSShell
testMoveWithTargetPortEmpty(org.apache.hadoop.hdfs.TestDFSShell)  Time elapsed: 0.294 sec  <<< ERROR!
java.net.BindException: Problem binding to [localhost:9820] java.net.BindException: Address already in use; For more details see:  http://wiki.apache.org/hadoop/BindException
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.apache.hadoop.ipc.Server.bind(Server.java:530)
	at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:793)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:2592)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:958)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server.<init>(ProtobufRpcEngine.java:563)
	at org.apache.hadoop.ipc.ProtobufRpcEngine.getServer(ProtobufRpcEngine.java:538)
	at org.apache.hadoop.ipc.RPC$Builder.build(RPC.java:800)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:426)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:783)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:710)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:924)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:903)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1620)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestDFSShell.testMoveWithTargetPortEmpty(TestDFSShell.java:567)

Running org.apache.hadoop.hdfs.TestHDFSServerPorts
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.252 sec - in org.apache.hadoop.hdfs.TestDFSOutputStream
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.502 sec - in org.apache.hadoop.hdfs.TestHDFSServerPorts
Running org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 39.21 sec - in org.apache.hadoop.hdfs.TestAppendSnapshotTruncate
Running org.apache.hadoop.hdfs.TestDFSPermission
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.777 sec - in org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
Running org.apache.hadoop.hdfs.TestRestartDFS
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 45.695 sec - in org.apache.hadoop.hdfs.TestDFSPermission
Running org.apache.hadoop.cli.TestCryptoAdminCLI
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 30.255 sec - in org.apache.hadoop.hdfs.TestRestartDFS
Running org.apache.hadoop.cli.TestHDFSCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.933 sec - in org.apache.hadoop.cli.TestCryptoAdminCLI
Running org.apache.hadoop.cli.TestAclCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.851 sec - in org.apache.hadoop.cli.TestAclCLI
Running org.apache.hadoop.cli.TestDeleteCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.477 sec - in org.apache.hadoop.cli.TestDeleteCLI
Running org.apache.hadoop.cli.TestCacheAdminCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.342 sec - in org.apache.hadoop.cli.TestCacheAdminCLI
Running org.apache.hadoop.cli.TestErasureCodingCLI
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 128.335 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Running org.apache.hadoop.cli.TestXAttrCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.705 sec - in org.apache.hadoop.cli.TestErasureCodingCLI
Running org.apache.hadoop.TestRefreshCallQueue
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.718 sec - in org.apache.hadoop.cli.TestXAttrCLI
Running org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.915 sec - in org.apache.hadoop.TestRefreshCallQueue
Running org.apache.hadoop.security.TestPermission
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 89.277 sec - in org.apache.hadoop.cli.TestHDFSCLI
Running org.apache.hadoop.security.TestRefreshUserMappings
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.626 sec - in org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.922 sec - in org.apache.hadoop.security.TestPermission
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.833 sec - in org.apache.hadoop.security.TestRefreshUserMappings

Results :

Failed tests: 
  TestNameNodeMetadataConsistency.testGenerationStampInFuture:113 expected:<17> but was:<0>
  TestBlockManager.testBlockReportQueueing:1074 null

Tests in error: 
  TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS:146->createDirectoriesSecurely:192 » IO
  TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI:289->internalTestConcurrentAsyncAPI:328->Object.wait:-2 » 
  TestFileAppend.testMultipleAppends » IO Failed to replace a bad datanode on th...
  TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite:199->internalTestConcurrentAsyncRenameWithOverwrite:226->Object.wait:-2 » 
  TestSecureNameNode.testName:65 » IO Failed on local exception: java.io.IOExcep...
  TestDFSShell.testMoveWithTargetPortEmpty:567 » Bind Problem binding to [localh...

Tests run: 4411, Failures: 2, Errors: 6, Skipped: 17

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:22 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:27 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.139 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:32 h
[INFO] Finished at: 2016-05-12T18:04:38+00:00
[INFO] Final Memory: 72M/801M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results

---------------------------------------------------------------------
To unsubscribe, e-mail: hdfs-dev-unsubscribe@hadoop.apache.org
For additional commands, e-mail: hdfs-dev-help@hadoop.apache.org


Hadoop-Hdfs-trunk - Build # 3133 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-trunk/3133/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 8391 lines...]
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:22 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:27 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.139 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:32 h
[INFO] Finished at: 2016-05-12T18:04:38+00:00
[INFO] Final Memory: 72M/801M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There was a timeout or other error in the fork -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
8 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
	at java.lang.Object.wait(Native Method)
	at org.apache.hadoop.hdfs.DataStreamer.waitForAckedSeqno(DataStreamer.java:768)
	at org.apache.hadoop.hdfs.DFSOutputStream.flushInternal(DFSOutputStream.java:697)
	at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:778)
	at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:755)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
	at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:101)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:430)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:379)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:372)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:365)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncAPI(TestAsyncDFSRename.java:328)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncAPI(TestAsyncDFSRename.java:289)


FAILED:  org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite

Error Message:
test timed out after 60000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 60000 milliseconds
	at java.lang.Object.wait(Native Method)
	at org.apache.hadoop.hdfs.DataStreamer.waitForAckedSeqno(DataStreamer.java:768)
	at org.apache.hadoop.hdfs.DFSOutputStream.flushInternal(DFSOutputStream.java:697)
	at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:778)
	at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:755)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
	at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:101)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:430)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:379)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:372)
	at org.apache.hadoop.hdfs.DFSTestUtil.createFile(DFSTestUtil.java:365)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.internalTestConcurrentAsyncRenameWithOverwrite(TestAsyncDFSRename.java:226)
	at org.apache.hadoop.hdfs.TestAsyncDFSRename.testAggressiveConcurrentAsyncRenameWithOverwrite(TestAsyncDFSRename.java:199)
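
Both TestAsyncDFSRename failures report the same "test timed out after 60000 milliseconds" message, which is what JUnit 4's per-test timeout produces when the test thread is still blocked (here in Object.wait inside DataStreamer.waitForAckedSeqno). A minimal sketch of that mechanism, assuming JUnit 4 on the classpath; the test body is a placeholder, not the real test:

    import org.junit.Test;

    public class TimeoutSketch {
      // JUnit interrupts the test thread and fails with
      // "test timed out after 60000 milliseconds" if the method
      // has not returned within the timeout.
      @Test(timeout = 60000)
      public void slowWrite() throws Exception {
        Thread.sleep(1000);   // placeholder for the blocked DFS write/close
      }
    }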


FAILED:  org.apache.hadoop.hdfs.TestDFSShell.testMoveWithTargetPortEmpty

Error Message:
Problem binding to [localhost:9820] java.net.BindException: Address already in use; For more details see:  http://wiki.apache.org/hadoop/BindException

Stack Trace:
java.net.BindException: Problem binding to [localhost:9820] java.net.BindException: Address already in use; For more details see:  http://wiki.apache.org/hadoop/BindException
	at sun.nio.ch.Net.bind0(Native Method)
	at sun.nio.ch.Net.bind(Net.java:444)
	at sun.nio.ch.Net.bind(Net.java:436)
	at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:214)
	at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
	at org.apache.hadoop.ipc.Server.bind(Server.java:530)
	at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:793)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:2592)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:958)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Server.<init>(ProtobufRpcEngine.java:563)
	at org.apache.hadoop.ipc.ProtobufRpcEngine.getServer(ProtobufRpcEngine.java:538)
	at org.apache.hadoop.ipc.RPC$Builder.build(RPC.java:800)
	at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.<init>(NameNodeRpcServer.java:426)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createRpcServer(NameNode.java:783)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.initialize(NameNode.java:710)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:924)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.<init>(NameNode.java:903)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.createNameNode(NameNode.java:1620)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNode(MiniDFSCluster.java:1247)
	at org.apache.hadoop.hdfs.MiniDFSCluster.configureNameService(MiniDFSCluster.java:1016)
	at org.apache.hadoop.hdfs.MiniDFSCluster.createNameNodesAndSetConf(MiniDFSCluster.java:891)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:823)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestDFSShell.testMoveWithTargetPortEmpty(TestDFSShell.java:567)
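
The TestDFSShell failure is environmental: the NameNode RPC server tries to bind the default port localhost:9820 and gets "Address already in use" because another process on the build slave already holds it. A hedged sketch of a pre-flight check a test could run before starting a cluster on a fixed port, using only java.net; the port number 9820 is taken from the error message, everything else is illustrative:

    import java.io.IOException;
    import java.net.InetSocketAddress;
    import java.net.ServerSocket;

    public class PortProbe {
      // Returns true if the given local port can currently be bound.
      static boolean isFree(int port) {
        try (ServerSocket s = new ServerSocket()) {
          s.setReuseAddress(true);
          s.bind(new InetSocketAddress("localhost", port));
          return true;
        } catch (IOException e) {
          return false;   // same condition that surfaces as the BindException above
        }
      }

      public static void main(String[] args) {
        System.out.println("9820 free: " + isFree(9820));
      }
    }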


FAILED:  org.apache.hadoop.hdfs.TestFileAppend.testMultipleAppends

Error Message:
Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:35341,DS-35dfa487-c50f-46d8-bd79-7d20c7780d35,DISK], DatanodeInfoWithStorage[127.0.0.1:41516,DS-ae64448e-6434-4a70-a23c-b8e8a5c6bda5,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:41516,DS-ae64448e-6434-4a70-a23c-b8e8a5c6bda5,DISK], DatanodeInfoWithStorage[127.0.0.1:35341,DS-35dfa487-c50f-46d8-bd79-7d20c7780d35,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration.

Stack Trace:
java.io.IOException: Failed to replace a bad datanode on the existing pipeline due to no more good datanodes being available to try. (Nodes: current=[DatanodeInfoWithStorage[127.0.0.1:35341,DS-35dfa487-c50f-46d8-bd79-7d20c7780d35,DISK], DatanodeInfoWithStorage[127.0.0.1:41516,DS-ae64448e-6434-4a70-a23c-b8e8a5c6bda5,DISK]], original=[DatanodeInfoWithStorage[127.0.0.1:41516,DS-ae64448e-6434-4a70-a23c-b8e8a5c6bda5,DISK], DatanodeInfoWithStorage[127.0.0.1:35341,DS-35dfa487-c50f-46d8-bd79-7d20c7780d35,DISK]]). The current failed datanode replacement policy is DEFAULT, and a client may configure this via 'dfs.client.block.write.replace-datanode-on-failure.policy' in its configuration.
	at org.apache.hadoop.hdfs.DataStreamer.findNewDatanode(DataStreamer.java:1166)
	at org.apache.hadoop.hdfs.DataStreamer.addDatanode2ExistingPipeline(DataStreamer.java:1236)
	at org.apache.hadoop.hdfs.DataStreamer.handleDatanodeReplacement(DataStreamer.java:1427)
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineInternal(DataStreamer.java:1342)
	at org.apache.hadoop.hdfs.DataStreamer.setupPipelineForAppendOrRecovery(DataStreamer.java:1325)
	at org.apache.hadoop.hdfs.DataStreamer.run(DataStreamer.java:603)


FAILED:  org.apache.hadoop.hdfs.server.blockmanagement.TestBlockManager.testBlockReportQueueing

Error Message:
null

Stack Trace:
java.lang.AssertionError: null
	at org.junit.Assert.fail(Assert.java:86)
	at org.junit.Assert.assertTrue(Assert.java:41)
	at org.junit.Assert.assertTrue(Assert.java:52)
	at org.apache.hadoop.hdfs.server.blockmanagement.TestBlockManager.testBlockReportQueueing(TestBlockManager.java:1074)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestNameNodeMetadataConsistency.testGenerationStampInFuture

Error Message:
expected:<17> but was:<0>

Stack Trace:
java.lang.AssertionError: expected:<17> but was:<0>
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotEquals(Assert.java:743)
	at org.junit.Assert.assertEquals(Assert.java:118)
	at org.junit.Assert.assertEquals(Assert.java:555)
	at org.junit.Assert.assertEquals(Assert.java:542)
	at org.apache.hadoop.hdfs.server.namenode.TestNameNodeMetadataConsistency.testGenerationStampInFuture(TestNameNodeMetadataConsistency.java:113)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestSecureNameNode.testName

Error Message:
Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":58382; 

Stack Trace:
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":58382; 
	at sun.security.krb5.KdcComm.send(KdcComm.java:250)
	at sun.security.krb5.KdcComm.send(KdcComm.java:191)
	at sun.security.krb5.KrbTgsReq.send(KrbTgsReq.java:187)
	at sun.security.krb5.KrbTgsReq.sendAndGetCreds(KrbTgsReq.java:202)
	at sun.security.krb5.internal.CredentialsUtil.serviceCreds(CredentialsUtil.java:311)
	at sun.security.krb5.internal.CredentialsUtil.acquireServiceCreds(CredentialsUtil.java:115)
	at sun.security.krb5.Credentials.acquireServiceCreds(Credentials.java:449)
	at sun.security.jgss.krb5.Krb5Context.initSecContext(Krb5Context.java:641)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:248)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
	at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:411)
	at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:617)
	at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:417)
	at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:799)
	at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:795)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:794)
	at org.apache.hadoop.ipc.Client$Connection.access$3200(Client.java:417)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1547)
	at org.apache.hadoop.ipc.Client.call(Client.java:1394)
	at org.apache.hadoop.ipc.Client.call(Client.java:1358)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:241)
	at com.sun.proxy.$Proxy19.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:582)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:257)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
	at com.sun.proxy.$Proxy20.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2302)
	at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2277)
	at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1119)
	at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1116)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1116)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1108)
	at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1909)
	at org.apache.hadoop.hdfs.server.namenode.TestSecureNameNode.testName(TestSecureNameNode.java:65)


FAILED:  org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS

Error Message:
Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":47390; 

Stack Trace:
java.io.IOException: Failed on local exception: java.io.IOException: javax.security.sasl.SaslException: GSS initiate failed [Caused by GSSException: No valid credentials provided (Mechanism level: Cannot get a KDC reply)]; Host Details : local host is: "asf903.gq1.ygridcore.net/67.195.81.147"; destination host is: "localhost":47390; 
	at sun.security.krb5.KdcComm.send(KdcComm.java:250)
	at sun.security.krb5.KdcComm.send(KdcComm.java:191)
	at sun.security.krb5.KrbTgsReq.send(KrbTgsReq.java:187)
	at sun.security.krb5.KrbTgsReq.sendAndGetCreds(KrbTgsReq.java:202)
	at sun.security.krb5.internal.CredentialsUtil.serviceCreds(CredentialsUtil.java:311)
	at sun.security.krb5.internal.CredentialsUtil.acquireServiceCreds(CredentialsUtil.java:115)
	at sun.security.krb5.Credentials.acquireServiceCreds(Credentials.java:449)
	at sun.security.jgss.krb5.Krb5Context.initSecContext(Krb5Context.java:641)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:248)
	at sun.security.jgss.GSSContextImpl.initSecContext(GSSContextImpl.java:179)
	at com.sun.security.sasl.gsskerb.GssKrb5Client.evaluateChallenge(GssKrb5Client.java:193)
	at org.apache.hadoop.security.SaslRpcClient.saslConnect(SaslRpcClient.java:411)
	at org.apache.hadoop.ipc.Client$Connection.setupSaslConnection(Client.java:617)
	at org.apache.hadoop.ipc.Client$Connection.access$2000(Client.java:417)
	at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:799)
	at org.apache.hadoop.ipc.Client$Connection$2.run(Client.java:795)
	at java.security.AccessController.doPrivileged(Native Method)
	at javax.security.auth.Subject.doAs(Subject.java:415)
	at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1755)
	at org.apache.hadoop.ipc.Client$Connection.setupIOstreams(Client.java:794)
	at org.apache.hadoop.ipc.Client$Connection.access$3200(Client.java:417)
	at org.apache.hadoop.ipc.Client.getConnection(Client.java:1547)
	at org.apache.hadoop.ipc.Client.call(Client.java:1394)
	at org.apache.hadoop.ipc.Client.call(Client.java:1358)
	at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:241)
	at com.sun.proxy.$Proxy25.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:582)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
	at java.lang.reflect.Method.invoke(Method.java:606)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:257)
	at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:103)
	at com.sun.proxy.$Proxy27.mkdirs(Unknown Source)
	at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:2302)
	at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2277)
	at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1119)
	at org.apache.hadoop.hdfs.DistributedFileSystem$25.doCall(DistributedFileSystem.java:1116)
	at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1116)
	at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1108)
	at org.apache.hadoop.fs.FileSystem.mkdirs(FileSystem.java:1909)
	at org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.createDirectoriesSecurely(TestRollingFileSystemSinkWithSecureHdfs.java:192)
	at org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs.testMissingPropertiesWithSecureHDFS(TestRollingFileSystemSinkWithSecureHdfs.java:146)
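
The two GSS failures (TestSecureNameNode and TestRollingFileSystemSinkWithSecureHdfs) fail before any HDFS logic runs: the client cannot reach a KDC ("Cannot get a KDC reply"), so the SASL handshake with the NameNode never completes. For reference, a minimal sketch of the secure-client login these tests depend on, assuming a reachable KDC and a valid keytab; the principal and keytab path below are placeholders, not values from this build:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.security.UserGroupInformation;

    public class SecureLoginSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("hadoop.security.authentication", "kerberos");
        UserGroupInformation.setConfiguration(conf);
        // The login needs the KDC to issue a TGT; the SASL GSS errors above
        // then appear on the first RPC to the NameNode when no KDC answers.
        UserGroupInformation.loginUserFromKeytab(
            "hdfs/localhost@EXAMPLE.COM",   // placeholder principal
            "/path/to/hdfs.keytab");        // placeholder keytab
        System.out.println(UserGroupInformation.getLoginUser());
      }
    }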




Build failed in Jenkins: Hadoop-Hdfs-trunk #3132

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk/3132/changes>

Changes:

[stevel] HADOOP-13122 Customize User-Agent header sent in HTTP requests by S3A.

------------------------------------------
[...truncated 5719 lines...]
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.724 sec - in org.apache.hadoop.hdfs.TestSetrepDecreasing
Running org.apache.hadoop.hdfs.TestRead
Running org.apache.hadoop.hdfs.TestHttpPolicy
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.965 sec - in org.apache.hadoop.hdfs.TestHttpPolicy
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure040
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.582 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure040
Running org.apache.hadoop.hdfs.TestWriteBlockGetsBlockLengthHint
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.497 sec - in org.apache.hadoop.hdfs.TestWriteBlockGetsBlockLengthHint
Running org.apache.hadoop.hdfs.TestLocalDFS
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.751 sec - in org.apache.hadoop.hdfs.TestRead
Running org.apache.hadoop.hdfs.TestBlocksScheduledCounter
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.474 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure160
Running org.apache.hadoop.hdfs.TestApplyingStoragePolicy
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.542 sec - in org.apache.hadoop.hdfs.TestBlocksScheduledCounter
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.737 sec - in org.apache.hadoop.hdfs.TestLocalDFS
Running org.apache.hadoop.hdfs.TestSetrepIncreasing
Running org.apache.hadoop.hdfs.TestDecommission
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.011 sec - in org.apache.hadoop.hdfs.TestApplyingStoragePolicy
Running org.apache.hadoop.hdfs.TestMultiThreadedHflush
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.541 sec - in org.apache.hadoop.hdfs.TestMultiThreadedHflush
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 57.249 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure110
Running org.apache.hadoop.hdfs.TestMissingBlocksAlert
Running org.apache.hadoop.hdfs.TestDFSUpgradeFromImage
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.179 sec - in org.apache.hadoop.hdfs.TestMissingBlocksAlert
Running org.apache.hadoop.hdfs.TestFileStatus
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 37.099 sec - in org.apache.hadoop.hdfs.TestSetrepIncreasing
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.656 sec - in org.apache.hadoop.hdfs.TestFileStatus
Running org.apache.hadoop.hdfs.TestBalancerBandwidth
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.205 sec - in org.apache.hadoop.hdfs.TestDFSUpgradeFromImage
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.574 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure060
Running org.apache.hadoop.hdfs.TestSetTimes
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.16 sec - in org.apache.hadoop.hdfs.TestBalancerBandwidth
Running org.apache.hadoop.TestGenericRefresh
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.751 sec - in org.apache.hadoop.hdfs.TestSetTimes
Running org.apache.hadoop.tracing.TestTracing
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.633 sec - in org.apache.hadoop.TestGenericRefresh
Running org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.048 sec - in org.apache.hadoop.tracing.TestTracing
Running org.apache.hadoop.tracing.TestTraceAdmin
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.044 sec - in org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Running org.apache.hadoop.security.TestPermission
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.68 sec - in org.apache.hadoop.tracing.TestTraceAdmin
Running org.apache.hadoop.security.TestPermissionSymlinks
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.69 sec - in org.apache.hadoop.security.TestPermission
Running org.apache.hadoop.security.TestRefreshUserMappings
Tests run: 15, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.064 sec - in org.apache.hadoop.security.TestPermissionSymlinks
Running org.apache.hadoop.fs.TestFcHdfsSetUMask
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.317 sec - in org.apache.hadoop.security.TestRefreshUserMappings
Running org.apache.hadoop.fs.TestSymlinkHdfsFileSystem
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 60.474 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure150
Running org.apache.hadoop.fs.loadGenerator.TestLoadGenerator
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.101 sec - in org.apache.hadoop.fs.TestFcHdfsSetUMask
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractRename
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.799 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractRename
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractDelete
Tests run: 74, Failures: 0, Errors: 0, Skipped: 2, Time elapsed: 17.148 sec - in org.apache.hadoop.fs.TestSymlinkHdfsFileSystem
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractAppend
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.122 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractDelete
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractOpen
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.135 sec - in org.apache.hadoop.fs.loadGenerator.TestLoadGenerator
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractGetFileStatus
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.946 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractAppend
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractConcat
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.12 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractGetFileStatus
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.558 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractOpen
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractMkdir
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractCreate
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.802 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractConcat
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractRootDirectory
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.183 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractMkdir
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractSeek
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.859 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractCreate
Running org.apache.hadoop.fs.contract.hdfs.TestHDFSContractSetTimes
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.675 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractRootDirectory
Running org.apache.hadoop.fs.TestEnhancedByteBufferAccess
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.622 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractSetTimes
Running org.apache.hadoop.fs.permission.TestStickyBit
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.33 sec - in org.apache.hadoop.fs.contract.hdfs.TestHDFSContractSeek
Running org.apache.hadoop.fs.TestSymlinkHdfsDisable
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.077 sec - in org.apache.hadoop.fs.TestSymlinkHdfsDisable
Running org.apache.hadoop.fs.TestHDFSFileContextMainOperations
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.636 sec - in org.apache.hadoop.fs.permission.TestStickyBit
Running org.apache.hadoop.fs.TestWebHdfsFileContextMainOperations
Tests run: 10, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 16.408 sec - in org.apache.hadoop.fs.TestEnhancedByteBufferAccess
Running org.apache.hadoop.fs.TestSWebHdfsFileContextMainOperations
Tests run: 69, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.474 sec - in org.apache.hadoop.fs.TestHDFSFileContextMainOperations
Running org.apache.hadoop.fs.TestSymlinkHdfsFileContext
Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 11.293 sec - in org.apache.hadoop.fs.TestWebHdfsFileContextMainOperations
Running org.apache.hadoop.fs.TestUnbuffer
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.284 sec - in org.apache.hadoop.fs.TestUnbuffer
Running org.apache.hadoop.fs.shell.TestHdfsTextCommand
Tests run: 71, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.983 sec - in org.apache.hadoop.fs.TestSymlinkHdfsFileContext
Running org.apache.hadoop.fs.TestResolveHdfsSymlink
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.358 sec - in org.apache.hadoop.fs.shell.TestHdfsTextCommand
Running org.apache.hadoop.fs.TestFcHdfsCreateMkdir
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.849 sec - in org.apache.hadoop.fs.TestResolveHdfsSymlink
Running org.apache.hadoop.fs.TestFcHdfsPermission
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.525 sec - in org.apache.hadoop.fs.TestFcHdfsCreateMkdir
Tests run: 19, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 185.349 sec - in org.apache.hadoop.hdfs.TestDecommission
Running org.apache.hadoop.fs.TestGlobPaths
Running org.apache.hadoop.fs.TestUrlStreamHandler
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.838 sec - in org.apache.hadoop.fs.TestFcHdfsPermission
Running org.apache.hadoop.fs.viewfs.TestViewFileSystemAtHdfsRoot
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.136 sec - in org.apache.hadoop.fs.TestUrlStreamHandler
Tests run: 37, Failures: 0, Errors: 0, Skipped: 6, Time elapsed: 6.601 sec - in org.apache.hadoop.fs.TestGlobPaths
Running org.apache.hadoop.fs.viewfs.TestViewFsWithAcls
Running org.apache.hadoop.fs.viewfs.TestViewFsDefaultValue
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.027 sec - in org.apache.hadoop.fs.viewfs.TestViewFsWithAcls
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.126 sec - in org.apache.hadoop.fs.viewfs.TestViewFsDefaultValue
Running org.apache.hadoop.fs.viewfs.TestViewFileSystemWithAcls
Running org.apache.hadoop.fs.viewfs.TestViewFileSystemHdfs
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.898 sec - in org.apache.hadoop.fs.viewfs.TestViewFileSystemWithAcls
Running org.apache.hadoop.fs.viewfs.TestViewFsAtHdfsRoot
Tests run: 60, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.126 sec - in org.apache.hadoop.fs.viewfs.TestViewFileSystemAtHdfsRoot
Running org.apache.hadoop.fs.viewfs.TestViewFsFileStatusHdfs
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.813 sec - in org.apache.hadoop.fs.viewfs.TestViewFsFileStatusHdfs
Running org.apache.hadoop.fs.viewfs.TestViewFileSystemWithXAttrs
Tests run: 58, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.232 sec - in org.apache.hadoop.fs.viewfs.TestViewFsAtHdfsRoot
Tests run: 60, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 16.486 sec - in org.apache.hadoop.fs.viewfs.TestViewFileSystemHdfs
Running org.apache.hadoop.fs.viewfs.TestViewFsWithXAttrs
Running org.apache.hadoop.fs.viewfs.TestViewFsHdfs
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.546 sec - in org.apache.hadoop.fs.viewfs.TestViewFileSystemWithXAttrs
Running org.apache.hadoop.fs.TestHdfsNativeCodeLoader
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.252 sec - in org.apache.hadoop.fs.TestHdfsNativeCodeLoader
Running org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.684 sec - in org.apache.hadoop.fs.viewfs.TestViewFsWithXAttrs
Running org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithHdfs
Tests run: 58, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.996 sec - in org.apache.hadoop.fs.viewfs.TestViewFsHdfs
Running org.apache.hadoop.tools.TestJMXGet
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.706 sec - in org.apache.hadoop.tools.TestJMXGet
Running org.apache.hadoop.tools.TestTools
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 2.066 sec - in org.apache.hadoop.tools.TestTools
Running org.apache.hadoop.tools.TestHdfsConfigFields
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.02 sec - in org.apache.hadoop.tools.TestHdfsConfigFields
Running org.apache.hadoop.TestRefreshCallQueue
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 20.746 sec - in org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithSecureHdfs
Running org.apache.hadoop.net.TestNetworkTopology
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.334 sec - in org.apache.hadoop.TestRefreshCallQueue
Running org.apache.hadoop.cli.TestCacheAdminCLI
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.842 sec - in org.apache.hadoop.net.TestNetworkTopology
Running org.apache.hadoop.cli.TestCryptoAdminCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.513 sec - in org.apache.hadoop.cli.TestCacheAdminCLI
Running org.apache.hadoop.cli.TestDeleteCLI
Tests run: 61, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.915 sec - in org.apache.hadoop.fs.TestSWebHdfsFileContextMainOperations
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.913 sec - in org.apache.hadoop.cli.TestCryptoAdminCLI
Running org.apache.hadoop.cli.TestAclCLI
Running org.apache.hadoop.cli.TestErasureCodingCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.131 sec - in org.apache.hadoop.cli.TestDeleteCLI
Running org.apache.hadoop.cli.TestXAttrCLI
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.237 sec - in org.apache.hadoop.metrics2.sink.TestRollingFileSystemSinkWithHdfs
Running org.apache.hadoop.cli.TestHDFSCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.33 sec - in org.apache.hadoop.cli.TestAclCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.075 sec - in org.apache.hadoop.cli.TestErasureCodingCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 8.304 sec - in org.apache.hadoop.cli.TestXAttrCLI
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 59.772 sec - in org.apache.hadoop.cli.TestHDFSCLI

Results :

Tests in error: 
  TestFsDatasetCache.testPageRounder:476 » Timeout Timed out waiting for conditi...

Tests run: 4418, Failures: 0, Errors: 1, Skipped: 17
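
(Editorial note: the single error above is the TestFsDatasetCache.testPageRounder timeout; its thread dump appears further down in the #3132 "Still Failing" notification. As a minimal sketch for reproducing it in isolation, one could run just that test through Surefire's test filter from the top of the Hadoop source tree. The module path is taken from the surefire-reports path shown in the [ERROR] output below; the exact goals and profiles the Jenkins job actually passes are not shown in this excerpt, so treat this as an illustrative invocation only.

    mvn test -Dtest=TestFsDatasetCache#testPageRounder -pl hadoop-hdfs-project/hadoop-hdfs

Surefire 2.17, the plugin version reported below, accepts the Class#method form of -Dtest for JUnit 4 tests.)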

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:05 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:05 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.101 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:10 h
[INFO] Finished at: 2016-05-12T15:34:35+00:00
[INFO] Final Memory: 60M/900M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
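
(Editorial note: per the [ERROR] guidance above, a failed reactor can be resumed from the hadoop-hdfs module instead of being rebuilt from scratch. A minimal sketch follows; "test" is substituted for the elided <goals> placeholder purely for illustration, since the goals the job actually invokes are not shown here.

    mvn test -rf :hadoop-hdfs        # 'test' stands in for the elided <goals>, illustration only
    mvn -e test -rf :hadoop-hdfs     # same resume, with full stack traces as the -e hint above suggests

Using -X instead of -e would enable full debug logging, as the error text notes.)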



Hadoop-Hdfs-trunk - Build # 3132 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-trunk/3132/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 5912 lines...]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.5:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [05:05 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  01:05 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.101 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 01:10 h
[INFO] Finished at: 2016-05-12T15:34:35+00:00
[INFO] Final Memory: 60M/900M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 tests failed.
FAILED:  org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCache.testPageRounder

Error Message:
Timed out waiting for condition. Thread diagnostics:
Timestamp: 2016-05-12 02:42:08,190

"VolumeScannerThread(/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data/data2)" daemon prio=5 tid=100 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:613)
"IPC Server handler 4 on 32944" daemon prio=5 tid=46 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.namenode.LeaseManager$Monitor@749c2306" daemon prio=5 tid=55 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.LeaseManager$Monitor.run(LeaseManager.java:339)
        at java.lang.Thread.run(Thread.java:745)
"Thread-86"  prio=5 tid=114 runnable
java.lang.Thread.State: RUNNABLE
        at java.lang.Thread.dumpThreads(Native Method)
        at java.lang.Thread.getAllStackTraces(Thread.java:1640)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDump(TimedOutTestsListener.java:87)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDiagnosticString(TimedOutTestsListener.java:73)
        at org.apache.hadoop.test.GenericTestUtils.waitFor(GenericTestUtils.java:269)
        at org.apache.hadoop.hdfs.DFSTestUtil.verifyExpectedCacheUsage(DFSTestUtil.java:1453)
        at org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCache.testPageRounder(TestFsDatasetCache.java:476)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
        at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
        at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
        at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
        at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
"org.apache.hadoop.hdfs.server.blockmanagement.PendingReconstructionBlocks$PendingReconstructionMonitor@53a46a8f" daemon prio=5 tid=39 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.PendingReconstructionBlocks$PendingReconstructionMonitor.run(PendingReconstructionBlocks.java:223)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 8 on 32944" daemon prio=5 tid=50 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server idle connection scanner for port 32944" daemon prio=5 tid=35 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server listener on 32944" daemon prio=5 tid=33 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener.run(Server.java:901)
"IPC Server handler 6 on 32944" daemon prio=5 tid=48 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"pool-2-thread-1"  prio=5 tid=22 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"853413050@qtp-1695356235-0" daemon prio=5 tid=67 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:626)
"Timer-4" daemon prio=5 tid=70 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"pool-9-thread-1"  prio=5 tid=83 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 2 on 41476" daemon prio=5 tid=86 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-5" daemon prio=5 tid=71 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server idle connection scanner for port 41476" daemon prio=5 tid=77 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"Timer for 'NameNode' metrics system" daemon prio=5 tid=19 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$LazyPersistFileScrubber@3444c36" daemon prio=5 tid=58 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$LazyPersistFileScrubber.run(FSNamesystem.java:3905)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner" daemon prio=5 tid=60 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
        at org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner.run(FileSystem.java:3231)
        at java.lang.Thread.run(Thread.java:745)
"141209778@qtp-923628031-1 - Acceptor0 SelectChannelConnector@localhost:41197" daemon prio=5 tid=24 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.mortbay.io.nio.SelectorManager$SelectSet.doSelect(SelectorManager.java:498)
        at org.mortbay.io.nio.SelectorManager.doSelect(SelectorManager.java:192)
        at org.mortbay.jetty.nio.SelectChannelConnector.accept(SelectChannelConnector.java:124)
        at org.mortbay.jetty.AbstractConnector$Acceptor.run(AbstractConnector.java:708)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeResourceMonitor@4133b793" daemon prio=5 tid=56 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeResourceMonitor.run(FSNamesystem.java:3776)
        at java.lang.Thread.run(Thread.java:745)
"Socket Reader #1 for port 32944"  prio=5 tid=34 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:848)
        at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:827)
"IPC Server handler 9 on 32944" daemon prio=5 tid=51 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 0 on 32944" daemon prio=5 tid=42 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Socket Reader #1 for port 41476"  prio=5 tid=76 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:848)
        at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:827)
"VolumeScannerThread(/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data/data1)" daemon prio=5 tid=99 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:613)
"pool-6-thread-1"  prio=5 tid=66 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"nioEventLoopGroup-2-1"  prio=10 tid=73 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:621)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:309)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:703)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 2 on 32944" daemon prio=5 tid=44 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-3" daemon prio=5 tid=69 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"java.util.concurrent.ThreadPoolExecutor$Worker@65dda5be[State = -1, empty queue]" daemon prio=5 tid=113 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server listener on 41476" daemon prio=5 tid=75 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener.run(Server.java:901)
"Finalizer" daemon prio=8 tid=3 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
        at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:189)
"IPC Server handler 3 on 32944" daemon prio=5 tid=45 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.util.JvmPauseMonitor$Monitor@78688290" daemon prio=5 tid=74 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.util.JvmPauseMonitor$Monitor.run(JvmPauseMonitor.java:192)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 1 on 41476" daemon prio=5 tid=85 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 9 on 41476" daemon prio=5 tid=93 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Signal Dispatcher" daemon prio=9 tid=4 runnable
java.lang.Thread.State: RUNNABLE
"IPC Server handler 6 on 41476" daemon prio=5 tid=90 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-2" daemon prio=5 tid=27 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server handler 8 on 41476" daemon prio=5 tid=92 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"ReplicationMonitor" daemon prio=5 tid=28 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor.run(BlockManager.java:4174)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 7 on 32944" daemon prio=5 tid=49 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Block report processor" daemon prio=5 tid=30 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
        at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.processQueue(BlockManager.java:4511)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.run(BlockManager.java:4500)
"IPC Server handler 5 on 41476" daemon prio=5 tid=89 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 7 on 41476" daemon prio=5 tid=91 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"2082014042@qtp-1695356235-1 - Acceptor0 SelectChannelConnector@localhost:40764" daemon prio=5 tid=68 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.mortbay.io.nio.SelectorManager$SelectSet.doSelect(SelectorManager.java:498)
        at org.mortbay.io.nio.SelectorManager.doSelect(SelectorManager.java:192)
        at org.mortbay.jetty.nio.SelectChannelConnector.accept(SelectChannelConnector.java:124)
        at org.mortbay.jetty.AbstractConnector$Acceptor.run(AbstractConnector.java:708)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
"StorageInfoMonitor" daemon prio=5 tid=29 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$StorageInfoDefragmenter.run(BlockManager.java:4209)
        at java.lang.Thread.run(Thread.java:745)
"client DomainSocketWatcher" daemon prio=5 tid=96 runnable
java.lang.Thread.State: RUNNABLE
        at org.apache.hadoop.net.unix.DomainSocketWatcher.doPoll0(Native Method)
        at org.apache.hadoop.net.unix.DomainSocketWatcher.access$900(DomainSocketWatcher.java:52)
        at org.apache.hadoop.net.unix.DomainSocketWatcher$2.run(DomainSocketWatcher.java:511)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 0 on 41476" daemon prio=5 tid=84 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server Responder" daemon prio=5 tid=36 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.apache.hadoop.ipc.Server$Responder.doRunLoop(Server.java:1079)
        at org.apache.hadoop.ipc.Server$Responder.run(Server.java:1062)
"pool-4-thread-1"  prio=5 tid=53 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"CacheReplicationMonitor(1697303292)"  prio=5 tid=59 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2176)
        at org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor.run(CacheReplicationMonitor.java:182)
"499094800@qtp-923628031-0" daemon prio=5 tid=23 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:626)
"IPC Server Responder" daemon prio=5 tid=78 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.apache.hadoop.ipc.Server$Responder.doRunLoop(Server.java:1079)
        at org.apache.hadoop.ipc.Server$Responder.run(Server.java:1062)
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl$LazyWriter@77fd070c" daemon prio=5 tid=101 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl$LazyWriter.run(FsDatasetImpl.java:3048)
        at java.lang.Thread.run(Thread.java:745)
"main"  prio=5 tid=1 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.lang.Thread.join(Thread.java:1289)
        at org.junit.internal.runners.statements.FailOnTimeout.evaluateStatement(FailOnTimeout.java:26)
        at org.junit.internal.runners.statements.FailOnTimeout.evaluate(FailOnTimeout.java:17)
        at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
        at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
        at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
        at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
        at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
        at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
        at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
        at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
        at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
"Reference Handler" daemon prio=10 tid=2 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:133)
"datanode DomainSocketWatcher" daemon prio=5 tid=65 runnable
java.lang.Thread.State: RUNNABLE
        at org.apache.hadoop.net.unix.DomainSocketWatcher.doPoll0(Native Method)
        at org.apache.hadoop.net.unix.DomainSocketWatcher.access$900(DomainSocketWatcher.java:52)
        at org.apache.hadoop.net.unix.DomainSocketWatcher$2.run(DomainSocketWatcher.java:511)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.util.JvmPauseMonitor$Monitor@6a039436" daemon prio=5 tid=20 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.util.JvmPauseMonitor$Monitor.run(JvmPauseMonitor.java:192)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 1 on 32944" daemon prio=5 tid=43 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeEditLogRoller@3e66af6b" daemon prio=5 tid=57 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeEditLogRoller.run(FSNamesystem.java:3818)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.hdfs.server.blockmanagement.HeartbeatManager$Monitor@3879e379" daemon prio=5 tid=31 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.HeartbeatManager$Monitor.run(HeartbeatManager.java:421)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 5 on 32944" daemon prio=5 tid=47 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-0" daemon prio=5 tid=25 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"DecommissionMonitor-0" daemon prio=5 tid=40 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"Timer-1" daemon prio=5 tid=26 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server handler 4 on 41476" daemon prio=5 tid=88 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 3 on 41476" daemon prio=5 tid=87 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.datanode.DataXceiverServer@42fe4b97" daemon prio=5 tid=64 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:241)
        at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:100)
        at org.apache.hadoop.hdfs.net.TcpPeerServer.accept(TcpPeerServer.java:85)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:145)
        at java.lang.Thread.run(Thread.java:745)
"AsyncAppender-Dispatcher-Thread-36" daemon prio=5 tid=52 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at org.apache.log4j.AsyncAppender$Dispatcher.run(AsyncAppender.java:548)
        at java.lang.Thread.run(Thread.java:745)
"AsyncAppender-Dispatcher-Thread-59" daemon prio=5 tid=82 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at org.apache.log4j.AsyncAppender$Dispatcher.run(AsyncAppender.java:548)
        at java.lang.Thread.run(Thread.java:745)



Stack Trace:
java.util.concurrent.TimeoutException: Timed out waiting for condition. Thread diagnostics:
Timestamp: 2016-05-12 02:42:08,190

"VolumeScannerThread(/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data/data2)" daemon prio=5 tid=100 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:613)
"IPC Server handler 4 on 32944" daemon prio=5 tid=46 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.namenode.LeaseManager$Monitor@749c2306" daemon prio=5 tid=55 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.LeaseManager$Monitor.run(LeaseManager.java:339)
        at java.lang.Thread.run(Thread.java:745)
"Thread-86"  prio=5 tid=114 runnable
java.lang.Thread.State: RUNNABLE
        at java.lang.Thread.dumpThreads(Native Method)
        at java.lang.Thread.getAllStackTraces(Thread.java:1640)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDump(TimedOutTestsListener.java:87)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDiagnosticString(TimedOutTestsListener.java:73)
        at org.apache.hadoop.test.GenericTestUtils.waitFor(GenericTestUtils.java:269)
        at org.apache.hadoop.hdfs.DFSTestUtil.verifyExpectedCacheUsage(DFSTestUtil.java:1453)
        at org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCache.testPageRounder(TestFsDatasetCache.java:476)
        at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
        at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
        at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
        at java.lang.reflect.Method.invoke(Method.java:606)
        at org.junit.runners.model.FrameworkMethod$1.runReflectiveCall(FrameworkMethod.java:47)
        at org.junit.internal.runners.model.ReflectiveCallable.run(ReflectiveCallable.java:12)
        at org.junit.runners.model.FrameworkMethod.invokeExplosively(FrameworkMethod.java:44)
        at org.junit.internal.runners.statements.InvokeMethod.evaluate(InvokeMethod.java:17)
        at org.junit.internal.runners.statements.FailOnTimeout$StatementThread.run(FailOnTimeout.java:74)
"org.apache.hadoop.hdfs.server.blockmanagement.PendingReconstructionBlocks$PendingReconstructionMonitor@53a46a8f" daemon prio=5 tid=39 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.PendingReconstructionBlocks$PendingReconstructionMonitor.run(PendingReconstructionBlocks.java:223)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 8 on 32944" daemon prio=5 tid=50 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server idle connection scanner for port 32944" daemon prio=5 tid=35 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server listener on 32944" daemon prio=5 tid=33 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener.run(Server.java:901)
"IPC Server handler 6 on 32944" daemon prio=5 tid=48 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"pool-2-thread-1"  prio=5 tid=22 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"853413050@qtp-1695356235-0" daemon prio=5 tid=67 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:626)
"Timer-4" daemon prio=5 tid=70 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"pool-9-thread-1"  prio=5 tid=83 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 2 on 41476" daemon prio=5 tid=86 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-5" daemon prio=5 tid=71 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server idle connection scanner for port 41476" daemon prio=5 tid=77 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"Timer for 'NameNode' metrics system" daemon prio=5 tid=19 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$LazyPersistFileScrubber@3444c36" daemon prio=5 tid=58 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$LazyPersistFileScrubber.run(FSNamesystem.java:3905)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner" daemon prio=5 tid=60 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
        at org.apache.hadoop.fs.FileSystem$Statistics$StatisticsDataReferenceCleaner.run(FileSystem.java:3231)
        at java.lang.Thread.run(Thread.java:745)
"141209778@qtp-923628031-1 - Acceptor0 SelectChannelConnector@localhost:41197" daemon prio=5 tid=24 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.mortbay.io.nio.SelectorManager$SelectSet.doSelect(SelectorManager.java:498)
        at org.mortbay.io.nio.SelectorManager.doSelect(SelectorManager.java:192)
        at org.mortbay.jetty.nio.SelectChannelConnector.accept(SelectChannelConnector.java:124)
        at org.mortbay.jetty.AbstractConnector$Acceptor.run(AbstractConnector.java:708)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeResourceMonitor@4133b793" daemon prio=5 tid=56 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeResourceMonitor.run(FSNamesystem.java:3776)
        at java.lang.Thread.run(Thread.java:745)
"Socket Reader #1 for port 32944"  prio=5 tid=34 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:848)
        at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:827)
"IPC Server handler 9 on 32944" daemon prio=5 tid=51 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 0 on 32944" daemon prio=5 tid=42 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Socket Reader #1 for port 41476"  prio=5 tid=76 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:848)
        at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:827)
"VolumeScannerThread(/home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/test/data/2/dfs/data/data1)" daemon prio=5 tid=99 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:613)
"pool-6-thread-1"  prio=5 tid=66 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"nioEventLoopGroup-2-1"  prio=10 tid=73 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:621)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:309)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:703)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 2 on 32944" daemon prio=5 tid=44 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-3" daemon prio=5 tid=69 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"java.util.concurrent.ThreadPoolExecutor$Worker@65dda5be[State = -1, empty queue]" daemon prio=5 tid=113 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server listener on 41476" daemon prio=5 tid=75 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:102)
        at org.apache.hadoop.ipc.Server$Listener.run(Server.java:901)
"Finalizer" daemon prio=8 tid=3 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:135)
        at java.lang.ref.ReferenceQueue.remove(ReferenceQueue.java:151)
        at java.lang.ref.Finalizer$FinalizerThread.run(Finalizer.java:189)
"IPC Server handler 3 on 32944" daemon prio=5 tid=45 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.util.JvmPauseMonitor$Monitor@78688290" daemon prio=5 tid=74 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.util.JvmPauseMonitor$Monitor.run(JvmPauseMonitor.java:192)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 1 on 41476" daemon prio=5 tid=85 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 9 on 41476" daemon prio=5 tid=93 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Signal Dispatcher" daemon prio=9 tid=4 runnable
java.lang.Thread.State: RUNNABLE
"IPC Server handler 6 on 41476" daemon prio=5 tid=90 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-2" daemon prio=5 tid=27 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server handler 8 on 41476" daemon prio=5 tid=92 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"ReplicationMonitor" daemon prio=5 tid=28 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$ReplicationMonitor.run(BlockManager.java:4174)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 7 on 32944" daemon prio=5 tid=49 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Block report processor" daemon prio=5 tid=30 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2043)
        at java.util.concurrent.ArrayBlockingQueue.take(ArrayBlockingQueue.java:374)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.processQueue(BlockManager.java:4511)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$BlockReportProcessingThread.run(BlockManager.java:4500)
"IPC Server handler 5 on 41476" daemon prio=5 tid=89 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 7 on 41476" daemon prio=5 tid=91 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"2082014042@qtp-1695356235-1 - Acceptor0 SelectChannelConnector@localhost:40764" daemon prio=5 tid=68 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.mortbay.io.nio.SelectorManager$SelectSet.doSelect(SelectorManager.java:498)
        at org.mortbay.io.nio.SelectorManager.doSelect(SelectorManager.java:192)
        at org.mortbay.jetty.nio.SelectChannelConnector.accept(SelectChannelConnector.java:124)
        at org.mortbay.jetty.AbstractConnector$Acceptor.run(AbstractConnector.java:708)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:582)
"StorageInfoMonitor" daemon prio=5 tid=29 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.BlockManager$StorageInfoDefragmenter.run(BlockManager.java:4209)
        at java.lang.Thread.run(Thread.java:745)
"client DomainSocketWatcher" daemon prio=5 tid=96 runnable
java.lang.Thread.State: RUNNABLE
        at org.apache.hadoop.net.unix.DomainSocketWatcher.doPoll0(Native Method)
        at org.apache.hadoop.net.unix.DomainSocketWatcher.access$900(DomainSocketWatcher.java:52)
        at org.apache.hadoop.net.unix.DomainSocketWatcher$2.run(DomainSocketWatcher.java:511)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 0 on 41476" daemon prio=5 tid=84 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server Responder" daemon prio=5 tid=36 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.apache.hadoop.ipc.Server$Responder.doRunLoop(Server.java:1079)
        at org.apache.hadoop.ipc.Server$Responder.run(Server.java:1062)
"pool-4-thread-1"  prio=5 tid=53 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"CacheReplicationMonitor(1697303292)"  prio=5 tid=59 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2176)
        at org.apache.hadoop.hdfs.server.blockmanagement.CacheReplicationMonitor.run(CacheReplicationMonitor.java:182)
"499094800@qtp-923628031-0" daemon prio=5 tid=23 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.mortbay.thread.QueuedThreadPool$PoolThread.run(QueuedThreadPool.java:626)
"IPC Server Responder" daemon prio=5 tid=78 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:87)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:98)
        at org.apache.hadoop.ipc.Server$Responder.doRunLoop(Server.java:1079)
        at org.apache.hadoop.ipc.Server$Responder.run(Server.java:1062)
"org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl$LazyWriter@77fd070c" daemon prio=5 tid=101 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetImpl$LazyWriter.run(FsDatasetImpl.java:3048)
        at java.lang.Thread.run(Thread.java:745)
"main"  prio=5 tid=1 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.lang.Thread.join(Thread.java:1289)
        at org.junit.internal.runners.statements.FailOnTimeout.evaluateStatement(FailOnTimeout.java:26)
        at org.junit.internal.runners.statements.FailOnTimeout.evaluate(FailOnTimeout.java:17)
        at org.junit.internal.runners.statements.RunBefores.evaluate(RunBefores.java:26)
        at org.junit.internal.runners.statements.RunAfters.evaluate(RunAfters.java:27)
        at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:271)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
        at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
        at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
        at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
        at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
        at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
        at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
"Reference Handler" daemon prio=10 tid=2 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at java.lang.ref.Reference$ReferenceHandler.run(Reference.java:133)
"datanode DomainSocketWatcher" daemon prio=5 tid=65 runnable
java.lang.Thread.State: RUNNABLE
        at org.apache.hadoop.net.unix.DomainSocketWatcher.doPoll0(Native Method)
        at org.apache.hadoop.net.unix.DomainSocketWatcher.access$900(DomainSocketWatcher.java:52)
        at org.apache.hadoop.net.unix.DomainSocketWatcher$2.run(DomainSocketWatcher.java:511)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.util.JvmPauseMonitor$Monitor@6a039436" daemon prio=5 tid=20 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.util.JvmPauseMonitor$Monitor.run(JvmPauseMonitor.java:192)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 1 on 32944" daemon prio=5 tid=43 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeEditLogRoller@3e66af6b" daemon prio=5 tid=57 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.namenode.FSNamesystem$NameNodeEditLogRoller.run(FSNamesystem.java:3818)
        at java.lang.Thread.run(Thread.java:745)
"org.apache.hadoop.hdfs.server.blockmanagement.HeartbeatManager$Monitor@3879e379" daemon prio=5 tid=31 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.server.blockmanagement.HeartbeatManager$Monitor.run(HeartbeatManager.java:421)
        at java.lang.Thread.run(Thread.java:745)
"IPC Server handler 5 on 32944" daemon prio=5 tid=47 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"Timer-0" daemon prio=5 tid=25 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"DecommissionMonitor-0" daemon prio=5 tid=40 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:1090)
        at java.util.concurrent.ScheduledThreadPoolExecutor$DelayedWorkQueue.take(ScheduledThreadPoolExecutor.java:807)
        at java.util.concurrent.ThreadPoolExecutor.getTask(ThreadPoolExecutor.java:1068)
        at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1130)
        at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
        at java.lang.Thread.run(Thread.java:745)
"Timer-1" daemon prio=5 tid=26 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"IPC Server handler 4 on 41476" daemon prio=5 tid=88 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"IPC Server handler 3 on 41476" daemon prio=5 tid=87 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:226)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2082)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:218)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2387)
"org.apache.hadoop.hdfs.server.datanode.DataXceiverServer@42fe4b97" daemon prio=5 tid=64 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:241)
        at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:100)
        at org.apache.hadoop.hdfs.net.TcpPeerServer.accept(TcpPeerServer.java:85)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:145)
        at java.lang.Thread.run(Thread.java:745)
"AsyncAppender-Dispatcher-Thread-36" daemon prio=5 tid=52 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at org.apache.log4j.AsyncAppender$Dispatcher.run(AsyncAppender.java:548)
        at java.lang.Thread.run(Thread.java:745)
"AsyncAppender-Dispatcher-Thread-59" daemon prio=5 tid=82 in Object.wait()
java.lang.Thread.State: WAITING (on object monitor)
        at java.lang.Object.wait(Native Method)
        at java.lang.Object.wait(Object.java:503)
        at org.apache.log4j.AsyncAppender$Dispatcher.run(AsyncAppender.java:548)
        at java.lang.Thread.run(Thread.java:745)


	at org.apache.hadoop.test.GenericTestUtils.waitFor(GenericTestUtils.java:269)
	at org.apache.hadoop.hdfs.DFSTestUtil.verifyExpectedCacheUsage(DFSTestUtil.java:1453)
	at org.apache.hadoop.hdfs.server.datanode.TestFsDatasetCache.testPageRounder(TestFsDatasetCache.java:476)