Posted to hdfs-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2016/01/30 05:34:28 UTC

Build failed in Jenkins: Hadoop-Hdfs-trunk-Java8 #851

See <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/851/changes>

Changes:

[jlowe] YARN-4428. Redirect RM page to AHS page when AHS turned on and RM page

------------------------------------------
[...truncated 8671 lines...]
"LeaseRenewer:jenkins@localhost:34256" daemon prio=5 tid=515 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.hdfs.client.impl.LeaseRenewer.run(LeaseRenewer.java:437)
        at org.apache.hadoop.hdfs.client.impl.LeaseRenewer.access$700(LeaseRenewer.java:76)
        at org.apache.hadoop.hdfs.client.impl.LeaseRenewer$1.run(LeaseRenewer.java:310)
        at java.lang.Thread.run(Thread.java:744)
"org.apache.hadoop.hdfs.server.datanode.DataXceiverServer@571899bb" daemon prio=5 tid=317 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.ServerSocketChannelImpl.accept0(Native Method)
        at sun.nio.ch.ServerSocketChannelImpl.accept(ServerSocketChannelImpl.java:241)
        at sun.nio.ch.ServerSocketAdaptor.accept(ServerSocketAdaptor.java:100)
        at org.apache.hadoop.hdfs.net.TcpPeerServer.accept(TcpPeerServer.java:83)
        at org.apache.hadoop.hdfs.server.datanode.DataXceiverServer.run(DataXceiverServer.java:145)
        at java.lang.Thread.run(Thread.java:744)
"IPC Server handler 1 on 36804" daemon prio=5 tid=186 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at sun.misc.Unsafe.park(Native Method)
        at java.util.concurrent.locks.LockSupport.parkNanos(LockSupport.java:215)
        at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.awaitNanos(AbstractQueuedSynchronizer.java:2078)
        at java.util.concurrent.LinkedBlockingQueue.poll(LinkedBlockingQueue.java:467)
        at org.apache.hadoop.ipc.CallQueueManager.take(CallQueueManager.java:129)
        at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2269)
"VolumeScannerThread(<https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data/data3)"> daemon prio=5 tid=375 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:614)
"refreshUsed-<https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data/data2/current/BP-1450978555-67.195.81.152-1454128188468"> daemon prio=5 tid=407 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at org.apache.hadoop.fs.DU$DURefreshThread.run(DU.java:115)
        at java.lang.Thread.run(Thread.java:744)
"nioEventLoopGroup-20-19"  prio=10 tid=600 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:614)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:361)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:703)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
        at java.lang.Thread.run(Thread.java:744)
"main"  prio=5 tid=1 runnable
java.lang.Thread.State: RUNNABLE
        at java.lang.Thread.dumpThreads(Native Method)
        at java.lang.Thread.getAllStackTraces(Thread.java:1602)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDump(TimedOutTestsListener.java:87)
        at org.apache.hadoop.test.TimedOutTestsListener.buildThreadDiagnosticString(TimedOutTestsListener.java:73)
        at org.apache.hadoop.test.TimedOutTestsListener.testFailure(TimedOutTestsListener.java:62)
        at org.junit.runner.notification.RunNotifier$4.notifyListener(RunNotifier.java:139)
        at org.junit.runner.notification.RunNotifier$SafeNotifier.run(RunNotifier.java:61)
        at org.junit.runner.notification.RunNotifier.fireTestFailures(RunNotifier.java:134)
        at org.junit.runner.notification.RunNotifier.fireTestFailure(RunNotifier.java:128)
        at org.junit.internal.runners.model.EachTestNotifier.addFailure(EachTestNotifier.java:23)
        at org.junit.runners.ParentRunner.runLeaf(ParentRunner.java:275)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:70)
        at org.junit.runners.BlockJUnit4ClassRunner.runChild(BlockJUnit4ClassRunner.java:50)
        at org.junit.runners.ParentRunner$3.run(ParentRunner.java:238)
        at org.junit.runners.ParentRunner$1.schedule(ParentRunner.java:63)
        at org.junit.runners.ParentRunner.runChildren(ParentRunner.java:236)
        at org.junit.runners.ParentRunner.access$000(ParentRunner.java:53)
        at org.junit.runners.ParentRunner$2.evaluate(ParentRunner.java:229)
        at org.junit.runners.ParentRunner.run(ParentRunner.java:309)
        at org.apache.maven.surefire.junit4.JUnit4Provider.execute(JUnit4Provider.java:264)
        at org.apache.maven.surefire.junit4.JUnit4Provider.executeTestSet(JUnit4Provider.java:153)
        at org.apache.maven.surefire.junit4.JUnit4Provider.invoke(JUnit4Provider.java:124)
        at org.apache.maven.surefire.booter.ForkedBooter.invokeProviderInSameClassLoader(ForkedBooter.java:200)
        at org.apache.maven.surefire.booter.ForkedBooter.runSuitesInProcess(ForkedBooter.java:153)
        at org.apache.maven.surefire.booter.ForkedBooter.main(ForkedBooter.java:103)
"IPC Server idle connection scanner for port 43637" daemon prio=5 tid=86 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"nioEventLoopGroup-20-7"  prio=10 tid=588 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Thread.sleep(Native Method)
        at io.netty.util.concurrent.SingleThreadEventExecutor.confirmShutdown(SingleThreadEventExecutor.java:614)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:361)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:703)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
        at java.lang.Thread.run(Thread.java:744)
"Socket Reader #1 for port 36804"  prio=5 tid=178 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:101)
        at org.apache.hadoop.ipc.Server$Listener$Reader.doRunLoop(Server.java:750)
        at org.apache.hadoop.ipc.Server$Listener$Reader.run(Server.java:729)
"VolumeScannerThread(<https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/hadoop-hdfs/target/test/data/dfs/data/data2)"> daemon prio=5 tid=384 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at org.apache.hadoop.hdfs.server.datanode.VolumeScanner.run(VolumeScanner.java:614)
"Timer-24" daemon prio=5 tid=292 timed_waiting
java.lang.Thread.State: TIMED_WAITING
        at java.lang.Object.wait(Native Method)
        at java.util.TimerThread.mainLoop(Timer.java:552)
        at java.util.TimerThread.run(Timer.java:505)
"nioEventLoopGroup-20-1"  prio=10 tid=355 runnable
java.lang.Thread.State: RUNNABLE
        at sun.nio.ch.EPollArrayWrapper.epollWait(Native Method)
        at sun.nio.ch.EPollArrayWrapper.poll(EPollArrayWrapper.java:269)
        at sun.nio.ch.EPollSelectorImpl.doSelect(EPollSelectorImpl.java:79)
        at sun.nio.ch.SelectorImpl.lockAndDoSelect(SelectorImpl.java:86)
        at sun.nio.ch.SelectorImpl.select(SelectorImpl.java:97)
        at io.netty.channel.nio.NioEventLoop.select(NioEventLoop.java:621)
        at io.netty.channel.nio.NioEventLoop.run(NioEventLoop.java:309)
        at io.netty.util.concurrent.SingleThreadEventExecutor$4.run(SingleThreadEventExecutor.java:703)
        at io.netty.util.concurrent.DefaultThreadFactory$DefaultRunnableDecorator.run(DefaultThreadFactory.java:137)
Tests run: 4, Failures: 0, Errors: 1, Skipped: 0, Time elapsed: 62.717 sec <<< FAILURE! - in org.apache.hadoop.hdfs.TestCrcCorruption
testCorruptionDuringWrt(org.apache.hadoop.hdfs.TestCrcCorruption)  Time elapsed: 50.146 sec  <<< ERROR!
java.lang.Exception: test timed out after 50000 milliseconds
	at java.lang.Object.wait(Native Method)
	at org.apache.hadoop.hdfs.DataStreamer.waitForAckedSeqno(DataStreamer.java:763)
	at org.apache.hadoop.hdfs.DFSOutputStream.flushInternal(DFSOutputStream.java:700)
	at org.apache.hadoop.hdfs.DFSOutputStream.closeImpl(DFSOutputStream.java:781)
	at org.apache.hadoop.hdfs.DFSOutputStream.close(DFSOutputStream.java:758)
	at org.apache.hadoop.fs.FSDataOutputStream$PositionCache.close(FSDataOutputStream.java:72)
	at org.apache.hadoop.fs.FSDataOutputStream.close(FSDataOutputStream.java:101)
	at org.apache.hadoop.hdfs.TestCrcCorruption.testCorruptionDuringWrt(TestCrcCorruption.java:136)

        at java.lang.Thread.run(Thread
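
The 50000 ms limit above is a per-test timeout: when it fires, JUnit reports "test timed out after ... milliseconds" and TimedOutTestsListener (visible in the "main" thread of the dump above) prints the stack traces of all live threads. As a minimal illustration, such a timeout is typically declared like this in JUnit 4 (hypothetical class, not the actual TestCrcCorruption source):

    import org.junit.Test;

    public class TimeoutExample {
      // JUnit interrupts the test thread and reports
      // "test timed out after 50000 milliseconds" if the body exceeds the limit.
      @Test(timeout = 50000)
      public void testWriteWithTimeout() throws Exception {
        // ... write data and wait for pipeline acks, as TestCrcCorruption does ...
      }
    }
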
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestGetBlocks
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 42.318 sec - in org.apache.hadoop.hdfs.TestGetBlocks
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure020
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.279 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure020
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDataTransferProtocol
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.456 sec - in org.apache.hadoop.hdfs.TestDataTransferProtocol
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestReadWhileWriting
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.89 sec - in org.apache.hadoop.hdfs.TestReadWhileWriting
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 116.037 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestKeyProviderCache
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.498 sec - in org.apache.hadoop.hdfs.TestKeyProviderCache
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.net.TestNetworkTopology
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 10.31 sec - in org.apache.hadoop.net.TestNetworkTopology
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.tracing.TestTraceAdmin
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.307 sec - in org.apache.hadoop.tracing.TestTraceAdmin
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.672 sec - in org.apache.hadoop.tracing.TestTracingShortCircuitLocalRead
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.tracing.TestTracing
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.384 sec - in org.apache.hadoop.tracing.TestTracing
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.TestRefreshCallQueue
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.196 sec - in org.apache.hadoop.TestRefreshCallQueue

Results :

Tests in error: 
  TestOpenFilesWithSnapshot.testOpenFilesWithRename:210 » IO Timed out waiting f...
  TestCrcCorruption.testCorruptionDuringWrt:136->Object.wait:-2 »  test timed ou...

Tests run: 4147, Failures: 0, Errors: 2, Skipped: 17

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:01 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  03:55 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.056 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:59 h
[INFO] Finished at: 2016-01-30T04:34:24+00:00
[INFO] Final Memory: 56M/473M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
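
To reproduce only the failing suites locally rather than resuming the whole reactor, one option is to run surefire directly on the hadoop-hdfs module with a test filter (module path per the usual source layout; adjust goals and path as needed):

    cd hadoop-hdfs-project/hadoop-hdfs
    mvn test -Dtest=TestOpenFilesWithSnapshot,TestCrcCorruption

-Dtest is a standard surefire option; the exact goals this Jenkins job passes to Maven are not shown in the excerpt above.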

Hadoop-Hdfs-trunk-Java8 - Build # 852 - Still Failing

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/852/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 5994 lines...]
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:02 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  03:51 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.072 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:55 h
[INFO] Finished at: 2016-02-01T09:42:06+00:00
[INFO] Final Memory: 56M/459M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk-Java8/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 tests failed.
FAILED:  org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.testExceedsCapacity

Error Message:
Pending cached list of 127.0.0.1:45490 is not empty, [{blockId=1073741841, replication=1, mark=true}]

Stack Trace:
java.lang.AssertionError: Pending cached list of 127.0.0.1:45490 is not empty, [{blockId=1073741841, replication=1, mark=true}]
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.assertTrue(Assert.java:41)
	at org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.checkPendingCachedEmpty(TestCacheDirectives.java:1480)
	at org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.testExceedsCapacity(TestCacheDirectives.java:1503)
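
The failure above indicates the DataNode's pending-cached list had not yet drained when checkPendingCachedEmpty ran. Checks like this are commonly made robust by polling until a deadline rather than asserting on a single snapshot; a self-contained sketch of that pattern (hypothetical helper, not the actual Hadoop test code):

    // Poll until the supplied list becomes empty or the timeout expires.
    // Purely illustrative; names are not from TestCacheDirectives.
    static void waitForEmpty(java.util.function.Supplier<java.util.List<?>> pending,
                             long timeoutMs) throws InterruptedException {
      long deadline = System.currentTimeMillis() + timeoutMs;
      while (System.currentTimeMillis() < deadline) {
        if (pending.get().isEmpty()) {
          return;                  // list drained, the check passes
        }
        Thread.sleep(100);         // re-check every 100 ms
      }
      org.junit.Assert.fail("Pending cached list is not empty: " + pending.get());
    }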



Jenkins build is back to normal : Hadoop-Hdfs-trunk-Java8 #853

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/853/changes>


Build failed in Jenkins: Hadoop-Hdfs-trunk-Java8 #852

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/852/changes>

Changes:

[rohithsharmaks] YARN-4615. Fix random test failure in

------------------------------------------
[...truncated 5801 lines...]
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 3.618 sec - in org.apache.hadoop.hdfs.util.TestStripedBlockUtil
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.util.TestXMLUtils
Tests run: 6, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.104 sec - in org.apache.hadoop.hdfs.util.TestXMLUtils
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.util.TestLightWeightHashSet
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.201 sec - in org.apache.hadoop.hdfs.util.TestLightWeightHashSet
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.util.TestMD5FileUtils
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.356 sec - in org.apache.hadoop.hdfs.util.TestMD5FileUtils
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.util.TestLightWeightLinkedSet
Tests run: 17, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.208 sec - in org.apache.hadoop.hdfs.util.TestLightWeightLinkedSet
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.util.TestAtomicFileOutputStream
Tests run: 4, Failures: 0, Errors: 0, Skipped: 1, Time elapsed: 0.293 sec - in org.apache.hadoop.hdfs.util.TestAtomicFileOutputStream
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestLease
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 12.705 sec - in org.apache.hadoop.hdfs.TestLease
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 18.17 sec - in org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestHFlush
Tests run: 14, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 44.041 sec - in org.apache.hadoop.hdfs.TestHFlush
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestErasureCodingPolicies
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.204 sec - in org.apache.hadoop.hdfs.TestErasureCodingPolicies
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestRemoteBlockReader
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.347 sec - in org.apache.hadoop.hdfs.TestRemoteBlockReader
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestHdfsAdmin
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.892 sec - in org.apache.hadoop.hdfs.TestHdfsAdmin
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDistributedFileSystem
Tests run: 18, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 66.531 sec - in org.apache.hadoop.hdfs.TestDistributedFileSystem
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestRollingUpgradeRollback
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.059 sec - in org.apache.hadoop.hdfs.TestRollingUpgradeRollback
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestRollingUpgrade
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 131.614 sec - in org.apache.hadoop.hdfs.TestRollingUpgrade
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDatanodeDeath
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 69.05 sec - in org.apache.hadoop.hdfs.TestDatanodeDeath
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestCrcCorruption
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 19.644 sec - in org.apache.hadoop.hdfs.TestCrcCorruption
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestFsShellPermission
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.574 sec - in org.apache.hadoop.hdfs.TestFsShellPermission
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.TestLocatedBlock
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.248 sec - in org.apache.hadoop.hdfs.protocol.TestLocatedBlock
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.TestLayoutVersion
Tests run: 11, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.258 sec - in org.apache.hadoop.hdfs.protocol.TestLayoutVersion
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.datatransfer.sasl.TestSaslDataTransfer
Tests run: 8, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 40.669 sec - in org.apache.hadoop.hdfs.protocol.datatransfer.sasl.TestSaslDataTransfer
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.datatransfer.TestPacketReceiver
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.421 sec - in org.apache.hadoop.hdfs.protocol.datatransfer.TestPacketReceiver
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.TestAnnotations
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.131 sec - in org.apache.hadoop.hdfs.protocol.TestAnnotations
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.protocol.TestBlockListAsLongs
Tests run: 7, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 1.036 sec - in org.apache.hadoop.hdfs.protocol.TestBlockListAsLongs
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure170
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 101.125 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure170
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure190
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 102.984 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure190
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSAddressConfig
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.855 sec - in org.apache.hadoop.hdfs.TestDFSAddressConfig
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSConfigKeys
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.213 sec - in org.apache.hadoop.hdfs.TestDFSConfigKeys
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestParallelUnixDomainRead
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 35.316 sec - in org.apache.hadoop.hdfs.TestParallelUnixDomainRead
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure140
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 52.454 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure140
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure100
Tests run: 10, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 49.674 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure100
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestReplication
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 57.916 sec - in org.apache.hadoop.hdfs.TestReplication
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestRead
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.239 sec - in org.apache.hadoop.hdfs.TestRead
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestPipelines
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.543 sec - in org.apache.hadoop.hdfs.TestPipelines
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 114.769 sec - in org.apache.hadoop.hdfs.TestDFSStripedOutputStreamWithFailure
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDeprecatedKeys
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 0.546 sec - in org.apache.hadoop.hdfs.TestDeprecatedKeys
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestAclsEndToEnd
Tests run: 9, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 80.597 sec - in org.apache.hadoop.hdfs.TestAclsEndToEnd
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestParallelShortCircuitReadNoChecksum
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.105 sec - in org.apache.hadoop.hdfs.TestParallelShortCircuitReadNoChecksum
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestParallelShortCircuitRead
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 13.374 sec - in org.apache.hadoop.hdfs.TestParallelShortCircuitRead
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestHDFSTrash
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 6.357 sec - in org.apache.hadoop.hdfs.TestHDFSTrash
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestFileAppend
Tests run: 12, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 41.524 sec - in org.apache.hadoop.hdfs.TestFileAppend
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSRemove
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 15.758 sec - in org.apache.hadoop.hdfs.TestDFSRemove
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestErasureCodingPolicyWithSnapshot
Tests run: 4, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 24.901 sec - in org.apache.hadoop.hdfs.TestErasureCodingPolicyWithSnapshot
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSRollback
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.784 sec - in org.apache.hadoop.hdfs.TestDFSRollback
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestReadWhileWriting
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 7.871 sec - in org.apache.hadoop.hdfs.TestReadWhileWriting
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestConnCache
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 4.894 sec - in org.apache.hadoop.hdfs.TestConnCache
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestPersistBlocks
Tests run: 5, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 27.054 sec - in org.apache.hadoop.hdfs.TestPersistBlocks
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestSetrepDecreasing
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 17.276 sec - in org.apache.hadoop.hdfs.TestSetrepDecreasing
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDatanodeLayoutUpgrade
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.653 sec - in org.apache.hadoop.hdfs.TestDatanodeLayoutUpgrade
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestFileCorruption
Tests run: 3, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 9.613 sec - in org.apache.hadoop.hdfs.TestFileCorruption
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestDFSStartupVersions
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 14.432 sec - in org.apache.hadoop.hdfs.TestDFSStartupVersions
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestWriteConfigurationToDFS
Tests run: 1, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.877 sec - in org.apache.hadoop.hdfs.TestWriteConfigurationToDFS
Java HotSpot(TM) 64-Bit Server VM warning: ignoring option MaxPermSize=768m; support was removed in 8.0
Running org.apache.hadoop.hdfs.TestListFilesInDFS
Tests run: 2, Failures: 0, Errors: 0, Skipped: 0, Time elapsed: 5.306 sec - in org.apache.hadoop.hdfs.TestListFilesInDFS

Results :

Failed tests: 
  TestCacheDirectives.testExceedsCapacity:1503->checkPendingCachedEmpty:1480 Pending cached list of 127.0.0.1:45490 is not empty, [{blockId=1073741841, replication=1, mark=true}]

Tests run: 4147, Failures: 1, Errors: 0, Skipped: 17

[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS Native Client
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HttpFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS BookKeeper Journal
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Skipping Apache Hadoop HDFS-NFS
[INFO] This project has been banned from the build due to previous failures.
[INFO] ------------------------------------------------------------------------
[INFO]                                                                         
[INFO] ------------------------------------------------------------------------
[INFO] Building Apache Hadoop HDFS Project 3.0.0-SNAPSHOT
[INFO] ------------------------------------------------------------------------
[INFO] 
[INFO] --- maven-clean-plugin:2.5:clean (default-clean) @ hadoop-hdfs-project ---
[INFO] Deleting <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/target>
[INFO] 
[INFO] --- maven-antrun-plugin:1.7:run (create-testdirs) @ hadoop-hdfs-project ---
[INFO] Executing tasks

main:
    [mkdir] Created dir: <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/target/test-dir>
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Not executing Javadoc as the project is not a Java classpath-capable package
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [04:02 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  03:51 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.072 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 03:55 h
[INFO] Finished at: 2016-02-01T09:42:06+00:00
[INFO] Final Memory: 56M/459M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to <https://builds.apache.org/job/Hadoop-Hdfs-trunk-Java8/ws/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports> for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results