Posted to hdfs-dev@hadoop.apache.org by Apache Hudson Server <hu...@hudson.apache.org> on 2010/12/22 07:23:30 UTC

Hadoop-Hdfs-22-branch - Build # 5 - Still Failing

See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/5/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3369 lines...]
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 18.358 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 41.517 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.936 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 220.64 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 439.793 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 35.946 sec

checkfailure:

run-test-hdfs-excluding-commit-and-smoke:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 1.952 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 6.082 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 18.589 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 44.927 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.904 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 220.621 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 414.015 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.207 sec

checkfailure:

run-test-hdfs-all-withtestcaseonly:

run-test-hdfs:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:725: Tests failed!

Total time: 104 minutes 47 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestHDFSTrash.testTrashEmptier

Error Message:
null

Stack Trace:
junit.framework.AssertionFailedError: null
	at org.apache.hadoop.fs.TestTrash.testTrashEmptier(TestTrash.java:473)
	at junit.extensions.TestDecorator.basicRun(TestDecorator.java:24)
	at junit.extensions.TestSetup$1.protect(TestSetup.java:23)
	at junit.extensions.TestSetup.run(TestSetup.java:27)
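
For context on the junit.extensions frames in this trace: TestHDFSTrash reuses the generic org.apache.hadoop.fs.TestTrash cases by wrapping them in a junit.extensions.TestSetup decorator, so suite-wide fixture code runs once around all of them. A minimal sketch of that wiring, assuming a suite-scoped MiniDFSCluster (illustrative, not the actual TestHDFSTrash source):

    import junit.extensions.TestSetup;
    import junit.framework.Test;
    import junit.framework.TestSuite;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.MiniDFSCluster;

    public class TrashSuiteSketch {
      private static MiniDFSCluster cluster;

      public static Test suite() {
        // setUp/tearDown here run once around the whole decorated suite.
        return new TestSetup(new TestSuite(org.apache.hadoop.fs.TestTrash.class)) {
          protected void setUp() throws Exception {
            cluster = new MiniDFSCluster.Builder(new Configuration()).build();
          }
          protected void tearDown() throws Exception {
            if (cluster != null) {
              cluster.shutdown();
            }
          }
        };
      }
    }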


FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.EPollArrayWrapper.epollCreate(Native Method)
	at sun.nio.ch.EPollArrayWrapper.<init>(EPollArrayWrapper.java:68)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:52)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Responder.<init>(Server.java:602)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1501)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:615)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1332)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1350)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1403)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:201)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:435)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
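
For context, the locking idiom behind "The directory is already locked" (Storage$StorageDirectory.lock in the trace) is an exclusive java.nio FileLock on a marker file inside each storage directory, so a NameNode format fails fast if another instance, often a MiniDFSCluster leaked by an earlier test in the same JVM, still holds build/test/data/dfs/name1. A rough sketch of that idiom; the method below is illustrative, not the actual Hadoop code:

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    class StorageLockSketch {
      static FileLock lockStorage(File storageDir) throws IOException {
        RandomAccessFile file =
            new RandomAccessFile(new File(storageDir, "in_use.lock"), "rws");
        FileLock lock;
        try {
          lock = file.getChannel().tryLock();
        } catch (OverlappingFileLockException e) {
          lock = null;  // this JVM already holds the lock (typical in tests)
        }
        if (lock == null) {
          file.close();
          throw new IOException("Cannot lock storage " + storageDir
              + ". The directory is already locked.");
        }
        return lock;
      }
    }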


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of a16b74ef257941883d505cb50a11adf1 but expecting 5b91b7e0b8c77e1dcfe06a77f658ad97

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of a16b74ef257941883d505cb50a11adf1 but expecting 5b91b7e0b8c77e1dcfe06a77f658ad97
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410tm(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)
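
The check that throws here compares an MD5 digest computed while reading the fsimage against the digest recorded when the image was saved; a mismatch means the secondary's downloaded or merged image differs from what the saver wrote. A minimal, self-contained sketch of that kind of verification (illustrative only, not FSImage.loadFSImage itself):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.security.MessageDigest;

    class ImageChecksumSketch {
      static String md5Hex(String path) throws Exception {
        MessageDigest md5 = MessageDigest.getInstance("MD5");
        FileInputStream in = new FileInputStream(path);
        try {
          byte[] buf = new byte[8192];
          for (int n; (n = in.read(buf)) != -1; ) {
            md5.update(buf, 0, n);
          }
        } finally {
          in.close();
        }
        StringBuilder hex = new StringBuilder();
        for (byte b : md5.digest()) {
          hex.append(String.format("%02x", b));
        }
        return hex.toString();
      }

      static void verify(String path, String expected) throws Exception {
        String actual = md5Hex(path);
        if (!actual.equals(expected)) {
          throw new IOException("Image file " + path
              + " is corrupt with MD5 checksum of " + actual
              + " but expecting " + expected);
        }
      }
    }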




Hadoop-Hdfs-22-branch - Build # 31 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/31/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3548 lines...]
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target
     [echo]  Including clover.jar in the war file ...
[cactifywar] Analyzing war: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/hdfsproxy-2.0-test.war
[cactifywar] Building war: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/test.war

cactifywar:

test-cactus:
     [echo]  Free Ports: startup-41552 / http-41553 / https-41554
     [echo] Please take a deep breath while Cargo gets the Tomcat for running the servlet tests...
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/webapps
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/temp
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/logs
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/reports
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
   [cactus] -----------------------------------------------------------------
   [cactus] Running tests against Tomcat 5.x @ http://localhost:41553
   [cactus] -----------------------------------------------------------------
   [cactus] Deploying [/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/test.war] to [/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/webapps]...
   [cactus] Tomcat 5.x starting...
Server [Apache-Coyote/1.1] started
   [cactus] WARNING: multiple versions of ant detected in path for junit 
   [cactus]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
   [cactus]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
   [cactus] Running org.apache.hadoop.hdfsproxy.TestAuthorizationFilter
   [cactus] Tests run: 4, Failures: 2, Errors: 0, Time elapsed: 0.497 sec
   [cactus] Test org.apache.hadoop.hdfsproxy.TestAuthorizationFilter FAILED
   [cactus] Running org.apache.hadoop.hdfsproxy.TestLdapIpDirFilter
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.577 sec
   [cactus] Tomcat 5.x started on port [41553]
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyFilter
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.358 sec
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyForwardServlet
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.316 sec
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyUtil
   [cactus] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.855 sec
   [cactus] Tomcat 5.x is stopping...
   [cactus] Tomcat 5.x is stopped

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:749: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:730: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/contrib/build.xml:48: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/contrib/hdfsproxy/build.xml:343: Tests failed!

Total time: 60 minutes 38 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.testPathPermit

Error Message:
expected:<403> but was:<200>

Stack Trace:
junit.framework.AssertionFailedError: expected:<403> but was:<200>
	at org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.endPathPermit(TestAuthorizationFilter.java:113)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callGenericEndMethod(ClientTestCaseCaller.java:442)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callEndMethod(ClientTestCaseCaller.java:209)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.runTest(ClientTestCaseCaller.java:149)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBareClient(AbstractCactusTestCase.java:218)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBare(AbstractCactusTestCase.java:134)


FAILED:  org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.testPathPermitQualified

Error Message:
expected:<403> but was:<200>

Stack Trace:
junit.framework.AssertionFailedError: expected:<403> but was:<200>
	at org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.endPathPermitQualified(TestAuthorizationFilter.java:136)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callGenericEndMethod(ClientTestCaseCaller.java:442)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callEndMethod(ClientTestCaseCaller.java:209)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.runTest(ClientTestCaseCaller.java:149)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBareClient(AbstractCactusTestCase.java:218)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBare(AbstractCactusTestCase.java:134)
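
Both failures are the client-side half of a Cactus test: after the filter runs inside Tomcat, Cactus calls an end<TestName>(WebResponse) method on the client, and the assertion on the HTTP status fires there. The response the test expected to be rejected with 403 Forbidden came back 200 OK. A hedged sketch of that check (the real TestAuthorizationFilter differs in detail):

    import junit.framework.Assert;
    import org.apache.cactus.WebResponse;

    public class AuthorizationCheckSketch {
      // Cactus invokes this after the server-side request completes.
      public void endPathPermit(WebResponse response) {
        // Produces "expected:<403> but was:<200>" when the filter lets the request through.
        Assert.assertEquals(403, response.getStatusCode());
      }
    }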




Hadoop-Hdfs-22-branch - Build # 30 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/30/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3318 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.156 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.249 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 15.641 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 33.541 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.809 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 211.456 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 463.717 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 35.344 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 50 minutes 16 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.server.namenode.TestBlockTokenWithDFS.testEnd2End

Error Message:
127.0.0.1:55394is not an underUtilized node

Stack Trace:
junit.framework.AssertionFailedError: 127.0.0.1:55394is not an underUtilized node
	at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:1011)
	at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:953)
	at org.apache.hadoop.hdfs.server.balancer.Balancer.run(Balancer.java:1496)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.runBalancer(TestBalancer.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.test(TestBalancer.java:234)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.oneNodeTest(TestBalancer.java:307)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.integrationTest(TestBalancer.java:319)
	at org.apache.hadoop.hdfs.server.namenode.TestBlockTokenWithDFS.__CLR3_0_2wspf0n10tj(TestBlockTokenWithDFS.java:529)
	at org.apache.hadoop.hdfs.server.namenode.TestBlockTokenWithDFS.testEnd2End(TestBlockTokenWithDFS.java:526)
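
The assertion fires inside Balancer.initNodes, which sorts every datanode into one of four buckets by comparing its utilization (used space over capacity, in percent) against the cluster average plus or minus the balancer threshold; the test expected 127.0.0.1:55394 to land in the underUtilized bucket. (The missing space before "is" is in the Hadoop message itself.) A simplified sketch of that bucketing, with illustrative names:

    class BalancerBucketSketch {
      static String classify(double nodeUtil, double avgUtil, double threshold) {
        if (nodeUtil > avgUtil + threshold) {
          return "overUtilized";      // must shed blocks
        } else if (nodeUtil > avgUtil) {
          return "aboveAvgUtilized";  // may shed blocks
        } else if (nodeUtil >= avgUtil - threshold) {
          return "belowAvgUtilized";  // may take blocks
        } else {
          return "underUtilized";     // must take blocks
        }
      }
    }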




Hadoop-Hdfs-22-branch - Build # 29 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/29/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3320 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.207 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.608 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 15.64 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 43.384 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.501 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 210.721 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 416.025 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 35.602 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 50 minutes 44 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0

Error Message:
127.0.0.1:59191is not an underUtilized node

Stack Trace:
junit.framework.AssertionFailedError: 127.0.0.1:59191is not an underUtilized node
	at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:1011)
	at org.apache.hadoop.hdfs.server.balancer.Balancer.initNodes(Balancer.java:953)
	at org.apache.hadoop.hdfs.server.balancer.Balancer.run(Balancer.java:1496)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.runBalancer(TestBalancer.java:247)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.test(TestBalancer.java:234)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.twoNodeTest(TestBalancer.java:312)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.__CLR3_0_29j3j5bp34(TestBalancer.java:328)
	at org.apache.hadoop.hdfs.server.balancer.TestBalancer.testBalancer0(TestBalancer.java:324)


REGRESSION:  org.apache.hadoop.hdfs.server.datanode.TestBlockReport.blockReport_08

Error Message:
Was waiting too long for a replica to become TEMPORARY

Stack Trace:
junit.framework.AssertionFailedError: Was waiting too long for a replica to become TEMPORARY
	at org.apache.hadoop.hdfs.server.datanode.TestBlockReport.waitForTempReplica(TestBlockReport.java:514)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockReport.__CLR3_0_2j2e00j11c8(TestBlockReport.java:408)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockReport.blockReport_08(TestBlockReport.java:390)
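
waitForTempReplica polls the datanode until a replica of the block under test reaches the TEMPORARY state and gives up after a deadline; on an overloaded slave the replica can miss the window, which makes this failure timing-sensitive rather than a straight functional regression. A hedged sketch of such a wait loop (names and timeout are illustrative, not the real test code):

    class ReplicaWaitSketch {
      interface ReplicaProbe {
        boolean isTemporary();  // stands in for querying the datanode's replica map
      }

      static void waitForTempReplica(ReplicaProbe probe, long timeoutMs)
          throws InterruptedException {
        long deadline = System.currentTimeMillis() + timeoutMs;
        while (!probe.isTemporary()) {
          if (System.currentTimeMillis() > deadline) {
            throw new AssertionError(
                "Was waiting too long for a replica to become TEMPORARY");
          }
          Thread.sleep(100);  // poll interval
        }
      }
    }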




Hadoop-Hdfs-22-branch - Build # 28 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/28/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2384 lines...]
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy

clean-sign:

sign:

signanddeploy:

simpledeploy:
[artifact:install-provider] Installing provider: org.apache.maven.wagon:wagon-http:jar:1.0-beta-2:runtime
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-http/1.0-beta-2/wagon-http-1.0-beta-2.pom from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-providers/1.0-beta-2/wagon-providers-1.0-beta-2.pom from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon/1.0-beta-2/wagon-1.0-beta-2.pom from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-http-shared/1.0-beta-2/wagon-http-shared-1.0-beta-2.pom from central
[artifact:install-provider] Downloading: jtidy/jtidy/4aug2000r7-dev/jtidy-4aug2000r7-dev.pom from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-provider-api/1.0-beta-2/wagon-provider-api-1.0-beta-2.pom from central
[artifact:install-provider] Downloading: commons-logging/commons-logging/1.0.3/commons-logging-1.0.3.pom from central
[artifact:install-provider] Downloading: commons-httpclient/commons-httpclient/2.0.2/commons-httpclient-2.0.2.pom from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-http/1.0-beta-2/wagon-http-1.0-beta-2.jar from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-http-shared/1.0-beta-2/wagon-http-shared-1.0-beta-2.jar from central
[artifact:install-provider] Downloading: jtidy/jtidy/4aug2000r7-dev/jtidy-4aug2000r7-dev.jar from central
[artifact:install-provider] Downloading: org/apache/maven/wagon/wagon-provider-api/1.0-beta-2/wagon-provider-api-1.0-beta-2.jar from central
[artifact:install-provider] Downloading: org/codehaus/plexus/plexus-utils/1.0.4/plexus-utils-1.0.4.jar from central
[artifact:install-provider] Downloading: commons-logging/commons-logging/1.0.3/commons-logging-1.0.3.jar from central
[artifact:install-provider] Downloading: commons-httpclient/commons-httpclient/2.0.2/commons-httpclient-2.0.2.jar from central
[artifact:deploy] Deploying to https://repository.apache.org/content/repositories/snapshots
[artifact:deploy] [INFO] Retrieving previous build number from apache.snapshots.https
[artifact:deploy] Uploading: org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110307.223307-340.jar to apache.snapshots.https
[artifact:deploy] Uploaded 1013K
[artifact:deploy] An error has occurred while processing the Maven artifact tasks.
[artifact:deploy]  Diagnosis:
[artifact:deploy] 
[artifact:deploy] Error deploying artifact 'org.apache.hadoop:hadoop-hdfs:jar': Error deploying artifact: Failed to transfer file: https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110307.223307-340.jar. Return code is: 401

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:1669: Error deploying artifact 'org.apache.hadoop:hadoop-hdfs:jar': Error deploying artifact: Failed to transfer file: https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110307.223307-340.jar. Return code is: 401

Total time: 1 minute 45 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/test/findbugs': No such file or directory
Build Failed
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
No tests ran.
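
A note on this run: it never reached the tests. artifact:deploy was rejected with HTTP 401 (unauthorized) by repository.apache.org, so the snapshot upload and everything after it failed. With the Maven Ant tasks, the credentials for the repository id logged above (apache.snapshots.https) come from a matching <server> entry in the deploying user's settings.xml; a hypothetical fragment, with placeholder credentials:

    <settings>
      <servers>
        <server>
          <!-- id must match the repository id used by artifact:deploy -->
          <id>apache.snapshots.https</id>
          <username>hudson</username>
          <password>*****</password>
        </server>
      </servers>
    </settings>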

Hadoop-Hdfs-22-branch - Build # 27 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/27/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3300 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.287 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.499 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 15.502 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 38.621 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.427 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 211.601 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 398.931 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 35.447 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 103 minutes 25 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
REGRESSION:  org.apache.hadoop.cli.TestHDFSCLI.testAll

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


REGRESSION:  org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery.testErrorReplicas

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.
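
A note on these "Timeout occurred" entries: the message comes from the Ant <junit> task, not from the tests. When a forked test VM exceeds the task's timeout, Ant kills the process and records a synthetic AssertionFailedError, which is why no stack trace follows and why timed-out suites show "Time elapsed: 0 sec" in other consoles in this thread. An illustrative fragment of such a task configuration; the attribute names are standard Ant, the property name is an assumption:

    <junit fork="yes" forkmode="perTest" timeout="${test.timeout}"
           printsummary="withOutAndErr"
           errorProperty="tests.failed" failureProperty="tests.failed">
      <!-- classpath, formatters and batchtest elided -->
    </junit>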




Hadoop-Hdfs-22-branch - Build # 26 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/26/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3520 lines...]
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target
     [echo]  Including clover.jar in the war file ...
[cactifywar] Analyzing war: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/hdfsproxy-2.0-test.war
[cactifywar] Building war: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/test.war

cactifywar:

test-cactus:
     [echo]  Free Ports: startup-41060 / http-41061 / https-41062
     [echo] Please take a deep breath while Cargo gets the Tomcat for running the servlet tests...
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/webapps
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/temp
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/logs
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/reports
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/conf
   [cactus] -----------------------------------------------------------------
   [cactus] Running tests against Tomcat 5.x @ http://localhost:41061
   [cactus] -----------------------------------------------------------------
   [cactus] Deploying [/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/test.war] to [/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/contrib/hdfsproxy/target/tomcat-config/webapps]...
   [cactus] Tomcat 5.x starting...
Server [Apache-Coyote/1.1] started
   [cactus] WARNING: multiple versions of ant detected in path for junit 
   [cactus]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
   [cactus]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
   [cactus] Running org.apache.hadoop.hdfsproxy.TestAuthorizationFilter
   [cactus] Tests run: 4, Failures: 2, Errors: 0, Time elapsed: 0.681 sec
   [cactus] Test org.apache.hadoop.hdfsproxy.TestAuthorizationFilter FAILED
   [cactus] Running org.apache.hadoop.hdfsproxy.TestLdapIpDirFilter
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.331 sec
   [cactus] Tomcat 5.x started on port [41061]
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyFilter
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.341 sec
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyForwardServlet
   [cactus] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.342 sec
   [cactus] Running org.apache.hadoop.hdfsproxy.TestProxyUtil
   [cactus] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.856 sec
   [cactus] Tomcat 5.x is stopping...
   [cactus] Tomcat 5.x is stopped

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:749: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:730: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/contrib/build.xml:48: The following error occurred while executing this line:
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/contrib/hdfsproxy/build.xml:343: Tests failed!

Total time: 59 minutes 19 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.testPathPermit

Error Message:
expected:<403> but was:<200>

Stack Trace:
junit.framework.AssertionFailedError: expected:<403> but was:<200>
	at org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.endPathPermit(TestAuthorizationFilter.java:113)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callGenericEndMethod(ClientTestCaseCaller.java:442)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callEndMethod(ClientTestCaseCaller.java:209)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.runTest(ClientTestCaseCaller.java:149)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBareClient(AbstractCactusTestCase.java:218)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBare(AbstractCactusTestCase.java:134)


FAILED:  org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.testPathPermitQualified

Error Message:
expected:<403> but was:<200>

Stack Trace:
junit.framework.AssertionFailedError: expected:<403> but was:<200>
	at org.apache.hadoop.hdfsproxy.TestAuthorizationFilter.endPathPermitQualified(TestAuthorizationFilter.java:136)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callGenericEndMethod(ClientTestCaseCaller.java:442)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.callEndMethod(ClientTestCaseCaller.java:209)
	at org.apache.cactus.internal.client.ClientTestCaseCaller.runTest(ClientTestCaseCaller.java:149)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBareClient(AbstractCactusTestCase.java:218)
	at org.apache.cactus.internal.AbstractCactusTestCase.runBare(AbstractCactusTestCase.java:134)




Hadoop-Hdfs-22-branch - Build # 25 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/25/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2808 lines...]
    [junit] 	at org.apache.hadoop.ipc.Server$Handler.run(Server.java:1404)
    [junit] )
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend3
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.TestFileAppend3 FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.TestFileCorruption
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 593.727 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileStatus
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 150.045 sec
    [junit] Running org.apache.hadoop.hdfs.TestGetBlocks
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 234.16 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSServerPorts
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 67.82 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSTrash
    [junit] Tests run: 3, Failures: 1, Errors: 0, Time elapsed: 319.184 sec
    [junit] Test org.apache.hadoop.hdfs.TestHDFSTrash FAILED
    [junit] Running org.apache.hadoop.hdfs.TestLease
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 234.076 sec
    [junit] Running org.apache.hadoop.hdfs.TestLeaseRecovery
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 499.446 sec
    [junit] Running org.apache.hadoop.hdfs.TestLocalDFS
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 149.927 sec
    [junit] Running org.apache.hadoop.hdfs.TestMissingBlocksAlert
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 151.653 sec
    [junit] Running org.apache.hadoop.hdfs.TestPread
    [junit] Running org.apache.hadoop.hdfs.TestPread
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.TestPread FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.TestQuota
    [junit] Running org.apache.hadoop.hdfs.TestQuota
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.TestQuota FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.TestRestartDFS
    [junit] Running org.apache.hadoop.hdfs.TestRestartDFS
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.TestRestartDFS FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.TestSafeMode
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 194.815 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockReplacement
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 421.968 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 150.91 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDiskError
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 638.218 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestInterDatanodeProtocol
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 656.247 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestSimulatedFSDataset
    [junit] Tests run: 8, Failures: 0, Errors: 0, Time elapsed: 0.703 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBackupNode
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
11 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestDFSShell.testErrOutPut

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestDFSUpgradeFromImage.testUpgradeFromImage

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestDistributedFileSystem.testAllWithDualPort

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestFileAppend.testComplexFlush

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestFileAppend2.testComplexAppend

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestFileAppend3.testAppendToPartialChunk

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestHDFSTrash.testTrashEmptier

Error Message:
null

Stack Trace:
junit.framework.AssertionFailedError: null
	at org.apache.hadoop.fs.TestTrash.testTrashEmptier(TestTrash.java:460)
	at junit.extensions.TestDecorator.basicRun(TestDecorator.java:24)
	at junit.extensions.TestSetup$1.protect(TestSetup.java:23)
	at junit.extensions.TestSetup.run(TestSetup.java:27)


FAILED:  org.apache.hadoop.hdfs.TestPread.testPreadDFSSimulated

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestQuota.testMultipleFilesSmallerThanOneBlock

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  org.apache.hadoop.hdfs.TestRestartDFS.testRestartDualPortDFS

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.


FAILED:  TEST-org.apache.hadoop.hdfs.server.namenode.TestBackupNode.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.server.namenode.TestBackupNode.xml was length 0



Hadoop-Hdfs-22-branch - Build # 24 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/24/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2708 lines...]
ivy-resolve-test:

ivy-retrieve-test:

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache

run-commit-test:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataDirs
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.525 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestGetImageServlet
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.48 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestINodeFile
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.225 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestNNLeaseRecovery
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 2.348 sec

checkfailure:
    [touch] Creating /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/testsfailed
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.cli.TestHDFSCLI
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  TEST-org.apache.hadoop.cli.TestHDFSCLI.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.cli.TestHDFSCLI.xml was length 0
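
A zero-length TEST-*.xml report like this one usually means the forked JVM died before Ant's XML formatter could flush any output, leaving Hudson nothing to parse. A minimal sketch of that kind of check, assuming a flat build/test report directory; the class name and path handling are illustrative, not Hudson's actual implementation:

    import java.io.File;

    public class EmptyReportCheck {
        public static void main(String[] args) {
            // Scan the report directory for TEST-*.xml files with no
            // bytes in them; an empty file means the run died mid-write.
            File dir = new File(args.length > 0 ? args[0] : "build/test");
            File[] files = dir.listFiles();
            if (files == null) return;
            for (File f : files) {
                String name = f.getName();
                if (name.startsWith("TEST-") && name.endsWith(".xml")
                        && f.length() == 0) {
                    System.err.println("Test report file " + f.getPath()
                        + " was length 0");
                }
            }
        }
    }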

REGRESSION:  org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery.testErrorReplicas

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.
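
The "Time elapsed: 0 sec" entry above is what Ant's JUnit task records when its fork-level timeout kills the test JVM, so the real duration is never captured. The in-code analogue is JUnit 4's per-test timeout; a minimal sketch, assuming a 60-second limit rather than whatever this branch's build actually configures:

    import org.junit.Test;

    public class TimeoutSketch {
        // JUnit 4 aborts the test and raises a timeout failure, much
        // like the AssertionFailedError recorded above, if the body
        // outlives the limit.
        @Test(timeout = 60000)
        public void finishesInTime() throws InterruptedException {
            Thread.sleep(100);  // stand-in for real block-recovery work
        }
    }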




Hadoop-Hdfs-22-branch - Build # 23 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/23/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2966 lines...]
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 37.143 sec
    [junit] Running org.apache.hadoop.hdfs.TestLeaseRecovery2
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 72.192 sec
    [junit] Running org.apache.hadoop.hdfs.TestListFilesInDFS
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.605 sec
    [junit] Running org.apache.hadoop.hdfs.TestListFilesInFileContext
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 2.541 sec
    [junit] Running org.apache.hadoop.hdfs.TestListPathServlet
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.507 sec
    [junit] Running org.apache.hadoop.hdfs.TestModTime
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.857 sec
    [junit] Running org.apache.hadoop.hdfs.TestMultiThreadedHflush
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 13.599 sec
    [junit] Running org.apache.hadoop.hdfs.TestPipelines
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.217 sec
    [junit] Running org.apache.hadoop.hdfs.TestReadWhileWriting
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 5.271 sec
    [junit] Running org.apache.hadoop.hdfs.TestRenameWhileOpen
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 51.069 sec
    [junit] Running org.apache.hadoop.hdfs.TestReplication
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 23.609 sec
    [junit] Running org.apache.hadoop.hdfs.TestSeekBug
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.799 sec
    [junit] Running org.apache.hadoop.hdfs.TestSetTimes
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 8.028 sec
    [junit] Running org.apache.hadoop.hdfs.TestSetrepDecreasing
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.912 sec
    [junit] Running org.apache.hadoop.hdfs.TestSetrepIncreasing
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 27.698 sec
    [junit] Running org.apache.hadoop.hdfs.TestSmallBlock
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 4.381 sec
    [junit] Running org.apache.hadoop.hdfs.TestWriteConfigurationToDFS
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.095 sec
    [junit] Running org.apache.hadoop.hdfs.security.TestDelegationTokenForProxyUser
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.276 sec
    [junit] Running org.apache.hadoop.hdfs.security.token.block.TestBlockToken
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.871 sec
    [junit] Running org.apache.hadoop.hdfs.server.balancer.TestBalancer
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 45.653 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestDistributedUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 27.093 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestGetUriFromString
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.158 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestJspHelper
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.964 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockReport
Build timed out. Aborting
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockReport
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.server.datanode.TestBlockReport FAILED (crashed)
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
java.io.FileNotFoundException: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/classes/hdfs-default.xml (Too many open files)

Stack Trace:
java.lang.RuntimeException: java.io.FileNotFoundException: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/classes/hdfs-default.xml (Too many open files)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:1536)
	at org.apache.hadoop.conf.Configuration.loadResources(Configuration.java:1401)
	at org.apache.hadoop.conf.Configuration.getProps(Configuration.java:1347)
	at org.apache.hadoop.conf.Configuration.set(Configuration.java:600)
	at org.apache.hadoop.conf.Configuration.setBoolean(Configuration.java:794)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:313)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:302)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.__CLR3_0_2k9gmsjsbs(TestFileConcurrentReader.java:285)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer(TestFileConcurrentReader.java:284)
Caused by: java.io.FileNotFoundException: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/classes/hdfs-default.xml (Too many open files)
	at java.io.FileInputStream.open(Native Method)
	at java.io.FileInputStream.<init>(FileInputStream.java:106)
	at java.io.FileInputStream.<init>(FileInputStream.java:66)
	at sun.net.www.protocol.file.FileURLConnection.connect(FileURLConnection.java:70)
	at sun.net.www.protocol.file.FileURLConnection.getInputStream(FileURLConnection.java:161)
	at com.sun.org.apache.xerces.internal.impl.XMLEntityManager.setupCurrentEntity(XMLEntityManager.java:653)
	at com.sun.org.apache.xerces.internal.impl.XMLVersionDetector.determineDocVersion(XMLVersionDetector.java:186)
	at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:772)
	at com.sun.org.apache.xerces.internal.parsers.XML11Configuration.parse(XML11Configuration.java:737)
	at com.sun.org.apache.xerces.internal.parsers.XMLParser.parse(XMLParser.java:119)
	at com.sun.org.apache.xerces.internal.parsers.DOMParser.parse(DOMParser.java:235)
	at com.sun.org.apache.xerces.internal.jaxp.DocumentBuilderImpl.parse(DocumentBuilderImpl.java:284)
	at javax.xml.parsers.DocumentBuilder.parse(DocumentBuilder.java:180)
	at org.apache.hadoop.conf.Configuration.loadResource(Configuration.java:1450)
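
"error=24, Too many open files" is file-descriptor exhaustion (EMFILE): the test JVM has leaked or legitimately opened more descriptors than the shell's ulimit -n allows, so even opening hdfs-default.xml fails. A small diagnostic that could be dropped into a test's setUp/tearDown to watch descriptor counts; com.sun.management is a Sun/Oracle JDK extension, so this sketch is JDK-specific:

    import java.lang.management.ManagementFactory;
    import java.lang.management.OperatingSystemMXBean;
    import com.sun.management.UnixOperatingSystemMXBean;

    public class FdUsage {
        // Prints open vs. maximum file descriptors for this JVM, useful
        // for spotting a leak before it reaches the EMFILE limit.
        public static void main(String[] args) {
            OperatingSystemMXBean os =
                ManagementFactory.getOperatingSystemMXBean();
            if (os instanceof UnixOperatingSystemMXBean) {
                UnixOperatingSystemMXBean unix =
                    (UnixOperatingSystemMXBean) os;
                System.out.println("open fds: "
                    + unix.getOpenFileDescriptorCount()
                    + " / max: " + unix.getMaxFileDescriptorCount());
            }
        }
    }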


FAILED:  org.apache.hadoop.hdfs.server.datanode.TestBlockReport.blockReport_06

Error Message:
Forked Java VM exited abnormally. Please note the time in the report does not reflect the time until the VM exit.

Stack Trace:
junit.framework.AssertionFailedError: Forked Java VM exited abnormally. Please note the time in the report does not reflect the time until the VM exit.


FAILED:  TEST-org.apache.hadoop.hdfs.server.datanode.TestDataNodeMXBean.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.server.datanode.TestDataNodeMXBean.xml was length 0



Hadoop-Hdfs-22-branch - Build # 22 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/22/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2906 lines...]
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientRetries
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 43.019 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSPermission
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 17.339 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSRemove
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.454 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSStartupVersions
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 19.253 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 27.149 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUtil
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.17 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeBlockScanner
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 97.15 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeConfig
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.451 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeDeath
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 129.106 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeRegistration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.808 sec
    [junit] Running org.apache.hadoop.hdfs.TestDecommission
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 32.382 sec
    [junit] Running org.apache.hadoop.hdfs.TestDeprecatedKeys
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.257 sec
    [junit] Running org.apache.hadoop.hdfs.TestDfsOverAvroRpc
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.394 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend4
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.728 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileConcurrentReader
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 31.532 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.992 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 9.609 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationDelete
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.825 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationEmpty
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.136 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
    [junit] Tests run: 28, Failures: 0, Errors: 0, Time elapsed: 35.695 sec
    [junit] Running org.apache.hadoop.hdfs.TestHFlush
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 19.144 sec
    [junit] Running org.apache.hadoop.hdfs.TestHftpFileSystem
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 2.738 sec
    [junit] Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.365 sec
    [junit] Running org.apache.hadoop.hdfs.TestLargeBlock
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 44.525 sec
    [junit] Running org.apache.hadoop.hdfs.TestLeaseRecovery2
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 test failed.
FAILED:  TEST-org.apache.hadoop.hdfs.TestLeaseRecovery2.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestLeaseRecovery2.xml was length 0



Hadoop-Hdfs-22-branch - Build # 21 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/21/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2948 lines...]
    [junit] 	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:1578)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1558)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
    [junit] 	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
    [junit] 	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
    [junit] 	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
    [junit] 	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
    [junit] 	at junit.framework.TestCase.runBare(TestCase.java:132)
    [junit] 	at junit.framework.TestResult$1.protect(TestResult.java:110)
    [junit] 	at junit.framework.TestResult.runProtected(TestResult.java:128)
    [junit] 	at junit.framework.TestResult.run(TestResult.java:113)
    [junit] 	at junit.framework.TestCase.run(TestCase.java:124)
    [junit] 	at junit.framework.TestSuite.runTest(TestSuite.java:232)
    [junit] 	at junit.framework.TestSuite.run(TestSuite.java:227)
    [junit] 	at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)
    [junit] 	at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)
    [junit] 	at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768)
    [junit] Caused by: java.io.IOException: java.io.IOException: error=24, Too many open files
    [junit] 	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
    [junit] 	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
    [junit] 	at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)
    [junit] 	... 34 more
    [junit] )
    [junit] Tests run: 7, Failures: 1, Errors: 2, Time elapsed: 14.51 sec
    [junit] Test org.apache.hadoop.hdfs.TestFileConcurrentReader FAILED
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.466 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 9.569 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationDelete
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.936 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationEmpty
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.12 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
    [junit] Tests run: 28, Failures: 0, Errors: 0, Time elapsed: 35.957 sec
    [junit] Running org.apache.hadoop.hdfs.TestHFlush
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 19.276 sec
    [junit] Running org.apache.hadoop.hdfs.TestHftpFileSystem
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 2.907 sec
    [junit] Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.024 sec
    [junit] Running org.apache.hadoop.hdfs.TestLargeBlock
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
5 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorTransferToVerySmallWrite

Error Message:
error occurred, see log above

Stack Trace:
junit.framework.AssertionFailedError: error occurred, see log above
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:391)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:302)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.__CLR3_0_2u5mf5tsbn(TestFileConcurrentReader.java:275)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorTransferToVerySmallWrite(TestFileConcurrentReader.java:274)


REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
Error while running command to get file permissions : java.io.IOException: Cannot run program "/bin/ls": java.io.IOException: error=24, Too many open files  at java.lang.ProcessBuilder.start(ProcessBuilder.java:459)  at org.apache.hadoop.util.Shell.runCommand(Shell.java:201)  at org.apache.hadoop.util.Shell.run(Shell.java:183)  at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:376)  at org.apache.hadoop.util.Shell.execCommand(Shell.java:462)  at org.apache.hadoop.util.Shell.execCommand(Shell.java:445)  at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:560)  at org.apache.hadoop.fs.RawLocalFileSystem.access$100(RawLocalFileSystem.java:49)  at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:481)  at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:456)  at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)  at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)  at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:1578)  at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1558)  at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)  at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)  at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)  at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)  at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)  at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)  at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)  at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)  at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)  at junit.framework.TestCase.runBare(TestCase.java:132)  at junit.framework.TestResult$1.protect(TestResult.java:110)  at junit.framework.TestResult.runProtected(TestResult.java:128)  at junit.framework.TestResult.run(TestResult.java:113)  at junit.framework.TestCase.run(TestCase.java:124)  at junit.framework.TestSuite.runTest(TestSuite.java:232)  at junit.framework.TestSuite.run(TestSuite.java:227)  at org.junit.internal.runners.JUnit38ClassRunner.run(JUnit38ClassRunner.java:83)  at junit.framework.JUnit4TestAdapter.run(JUnit4TestAdapter.java:39)  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.run(JUnitTestRunner.java:420)  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.launch(JUnitTestRunner.java:911)  at org.apache.tools.ant.taskdefs.optional.junit.JUnitTestRunner.main(JUnitTestRunner.java:768) Caused by: java.io.IOException: java.io.IOException: error=24, Too many open files  at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)  at java.lang.ProcessImpl.start(ProcessImpl.java:65)  at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)  ... 34 more 

Stack Trace:
java.lang.RuntimeException: Error while running command to get file permissions : java.io.IOException: Cannot run program "/bin/ls": java.io.IOException: error=24, Too many open files
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:459)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:201)
	at org.apache.hadoop.util.Shell.run(Shell.java:183)
	at org.apache.hadoop.util.Shell$ShellCommandExecutor.execute(Shell.java:376)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:462)
	at org.apache.hadoop.util.Shell.execCommand(Shell.java:445)
	at org.apache.hadoop.fs.RawLocalFileSystem.execCommand(RawLocalFileSystem.java:560)
	at org.apache.hadoop.fs.RawLocalFileSystem.access$100(RawLocalFileSystem.java:49)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:481)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:456)
	at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:1578)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1558)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
Caused by: java.io.IOException: java.io.IOException: error=24, Too many open files
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)

	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.loadPermissionInfo(RawLocalFileSystem.java:506)
	at org.apache.hadoop.fs.RawLocalFileSystem$RawLocalFileStatus.getPermission(RawLocalFileSystem.java:456)
	at org.apache.hadoop.util.DiskChecker.mkdirsWithExistsAndPermissionCheck(DiskChecker.java:131)
	at org.apache.hadoop.util.DiskChecker.checkDir(DiskChecker.java:148)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.getDataDirsFromURIs(DataNode.java:1578)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1558)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:615)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1332)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1350)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1403)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:201)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:435)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
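
The "directory is already locked" failure comes from HDFS's Storage.lock(), which takes an exclusive file lock on an in_use.lock file inside each storage directory; if an earlier MiniDFSCluster in the same JVM was never shut down, that lock is still held. A rough sketch of such a locking scheme, assuming the in_use.lock naming convention, with the error wording copied from the message above:

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    public class DirLock {
        // Take an exclusive lock on <dir>/in_use.lock. tryLock() returns
        // null when another process holds the lock and throws when this
        // JVM already does -- e.g. a cluster that was never shut down.
        public static FileLock tryLockDir(File dir) throws IOException {
            RandomAccessFile raf =
                new RandomAccessFile(new File(dir, "in_use.lock"), "rws");
            FileLock lock;
            try {
                lock = raf.getChannel().tryLock();
            } catch (OverlappingFileLockException e) {
                lock = null;  // this JVM already holds the lock
            }
            if (lock == null) {
                raf.close();
                throw new IOException("Cannot lock storage " + dir
                    + ". The directory is already locked.");
            }
            return lock;
        }
    }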


FAILED:  TEST-org.apache.hadoop.hdfs.TestLargeBlock.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestLargeBlock.xml was length 0

FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 7e34d8652a17c1edf9091af4ca1ad6b4 but expecting 14263a87b080eda2a0af927d04587c6d

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 7e34d8652a17c1edf9091af4ca1ad6b4 but expecting 14263a87b080eda2a0af927d04587c6d
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410vh(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)
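
FSImage.loadFSImage refuses an image whose MD5 digest differs from the checksum recorded at checkpoint time, which is what this intermittent TestStorageRestore failure trips over. The core of such a check is a streaming digest over the file; a minimal sketch:

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.security.MessageDigest;

    public class Md5Check {
        // Streams the file through an MD5 digest and returns the hex
        // string of the kind compared against the recorded checksum.
        public static String md5Of(String path) throws Exception {
            MessageDigest md = MessageDigest.getInstance("MD5");
            InputStream in = new FileInputStream(path);
            try {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    md.update(buf, 0, n);
                }
            } finally {
                in.close();
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        }
    }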




Hadoop-Hdfs-22-branch - Build # 20 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/20/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2945 lines...]
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientRetries
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 45.561 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSPermission
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 17.075 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSRemove
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.212 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSStartupVersions
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.938 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 24.749 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUtil
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.174 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeBlockScanner
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 94.131 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeConfig
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.632 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeDeath
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 120.056 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeRegistration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.998 sec
    [junit] Running org.apache.hadoop.hdfs.TestDecommission
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 32.312 sec
    [junit] Running org.apache.hadoop.hdfs.TestDeprecatedKeys
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.266 sec
    [junit] Running org.apache.hadoop.hdfs.TestDfsOverAvroRpc
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.849 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend4
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.607 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileConcurrentReader
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 26.07 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.328 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 9.435 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationDelete
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.961 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationEmpty
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.137 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
    [junit] Tests run: 28, Failures: 0, Errors: 0, Time elapsed: 33.897 sec
    [junit] Running org.apache.hadoop.hdfs.TestHFlush
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 19.459 sec
    [junit] Running org.apache.hadoop.hdfs.TestHftpFileSystem
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 2.932 sec
    [junit] Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 13.84 sec
    [junit] Running org.apache.hadoop.hdfs.TestLargeBlock
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 41.482 sec
    [junit] Running org.apache.hadoop.hdfs.TestLeaseRecovery2
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  TEST-org.apache.hadoop.hdfs.TestLeaseRecovery2.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestLeaseRecovery2.xml was length 0

FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 3ebbb7dd4967409332031e39542a773f but expecting 8d536543fc611337555a86f79d006ea8

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 3ebbb7dd4967409332031e39542a773f but expecting 8d536543fc611337555a86f79d006ea8
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410vf(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 19 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/19/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2701 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/cache

run-commit-test:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery FAILED (timeout)
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataDirs
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.524 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestGetImageServlet
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.699 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestINodeFile
    [junit] Tests run: 6, Failures: 0, Errors: 0, Time elapsed: 0.253 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestNNLeaseRecovery
    [junit] Tests run: 11, Failures: 0, Errors: 0, Time elapsed: 2.476 sec

checkfailure:
    [touch] Creating /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/testsfailed
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/logs
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.cli.TestHDFSCLI
Build timed out. Aborting
    [junit] Running org.apache.hadoop.cli.TestHDFSCLI
    [junit] Tests run: 1, Failures: 0, Errors: 1, Time elapsed: 0 sec
    [junit] Test org.apache.hadoop.cli.TestHDFSCLI FAILED (crashed)
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
REGRESSION:  org.apache.hadoop.cli.TestHDFSCLI.testAll

Error Message:
Forked Java VM exited abnormally. Please note the time in the report does not reflect the time until the VM exit.

Stack Trace:
junit.framework.AssertionFailedError: Forked Java VM exited abnormally. Please note the time in the report does not reflect the time until the VM exit.


REGRESSION:  org.apache.hadoop.hdfs.server.datanode.TestBlockRecovery.testErrorReplicas

Error Message:
Timeout occurred. Please note the time in the report does not reflect the time until the timeout.

Stack Trace:
junit.framework.AssertionFailedError: Timeout occurred. Please note the time in the report does not reflect the time until the timeout.




Hadoop-Hdfs-22-branch - Build # 18 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/18/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 1027 lines...]
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/webapps/datanode/WEB-INF
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/webapps/secondary/WEB-INF
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/ant
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/c++
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/hdfs/classes
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/extraconf
    [touch] Creating /tmp/null1831926762
   [delete] Deleting: /tmp/null1831926762
     [copy] Copying 3 files to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/webapps
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/conf
     [copy] Copying /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/conf/hdfs-site.xml.template to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/conf/hdfs-site.xml
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/conf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/conf
     [copy] Copying /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/conf/hdfs-site.xml.template to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/conf/hdfs-site.xml

compile-hdfs-classes:
    [javac] Compiling 210 source files to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/classes
    [javac] /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/java/org/apache/hadoop/hdfs/server/namenode/SecondaryNameNode.java:200: cannot find symbol
    [javac] symbol  : method get(int)
    [javac] location: class java.lang.String[]
    [javac]                 Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0));
    [javac]                                                                  ^
    [javac] /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/src/java/org/apache/hadoop/hdfs/tools/DFSck.java:200: cannot find symbol
    [javac] symbol  : method get(int)
    [javac] location: class java.lang.String[]
    [javac]           Krb5AndCertsSslSocketConnector.KRB5_CIPHER_SUITES.get(0));
    [javac]                                                            ^
    [javac] Note: Some input files use or override a deprecated API.
    [javac] Note: Recompile with -Xlint:deprecation for details.
    [javac] 2 errors
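
Both errors share one cause: on this branch KRB5_CIPHER_SUITES is declared as a String[], while SecondaryNameNode and DFSck call List.get(int) on it. A simplified illustration of the mismatch and the two obvious fixes; the field and suite name are stand-ins for the real Krb5AndCertsSslSocketConnector constant:

    import java.util.Arrays;
    import java.util.List;

    public class CipherSuitesFix {
        static final String[] KRB5_CIPHER_SUITES =
            { "TLS_KRB5_WITH_3DES_EDE_CBC_SHA" };

        public static void main(String[] args) {
            // Does not compile: arrays have no get(int) method.
            // String suite = KRB5_CIPHER_SUITES.get(0);

            // Fix 1: index the array directly.
            String suite = KRB5_CIPHER_SUITES[0];

            // Fix 2: expose the constant as a List<String> so callers
            // can keep using get(0).
            List<String> suites = Arrays.asList(KRB5_CIPHER_SUITES);
            System.out.println(suite.equals(suites.get(0)));
        }
    }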

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:345: Compile failed; see the compiler error output for details.

Total time: 11 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/*.tar.gz': No such file or directory
mv: cannot stat `build/*.jar': No such file or directory
mv: cannot stat `build/test/findbugs': No such file or directory
mv: cannot stat `build/docs/api': No such file or directory
Build Failed
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
No tests ran.

Hadoop-Hdfs-22-branch - Build # 17 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/17/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2952 lines...]
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.865 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientRetries
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 43.763 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSPermission
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 17.073 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSRemove
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.29 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSStartupVersions
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.506 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 25.337 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUtil
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.178 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeBlockScanner
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 173.495 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeConfig
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.652 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeDeath
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 118.922 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeRegistration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.808 sec
    [junit] Running org.apache.hadoop.hdfs.TestDecommission
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 32.296 sec
    [junit] Running org.apache.hadoop.hdfs.TestDeprecatedKeys
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.258 sec
    [junit] Running org.apache.hadoop.hdfs.TestDfsOverAvroRpc
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.813 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend4
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.582 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileConcurrentReader
    [junit] Tests run: 7, Failures: 0, Errors: 1, Time elapsed: 28.952 sec
    [junit] Test org.apache.hadoop.hdfs.TestFileConcurrentReader FAILED
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.749 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 9.404 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationDelete
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 12.89 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationEmpty
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.154 sec
    [junit] Running org.apache.hadoop.hdfs.TestHDFSFileSystemContract
    [junit] Tests run: 28, Failures: 0, Errors: 0, Time elapsed: 34.049 sec
    [junit] Running org.apache.hadoop.hdfs.TestHFlush
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 17.668 sec
    [junit] Running org.apache.hadoop.hdfs.TestHftpFileSystem
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 2.964 sec
    [junit] Running org.apache.hadoop.hdfs.TestInjectionForSimulatedStorage
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.491 sec
    [junit] Running org.apache.hadoop.hdfs.TestLargeBlock
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
FAILED:  TEST-org.apache.hadoop.hdfs.TestLargeBlock.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestLargeBlock.xml was length 0

FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.IOUtil.initPipe(Native Method)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:49)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:318)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1491)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of cd8893542d5e8c56dfef3941fd8ec876 but expecting 11d51f16d506b702ba3469b15a5528b6

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of cd8893542d5e8c56dfef3941fd8ec876 but expecting 11d51f16d506b702ba3469b15a5528b6
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410u7(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 16 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/16/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2903 lines...]
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 13.096 sec
    [junit] Running org.apache.hadoop.fs.permission.TestStickyBit
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 9.424 sec
    [junit] Running org.apache.hadoop.hdfs.TestBlockMissingException
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 18.076 sec
    [junit] Running org.apache.hadoop.hdfs.TestByteRangeInputStream
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.15 sec
    [junit] Running org.apache.hadoop.hdfs.TestClientBlockVerification
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 3.084 sec
    [junit] Running org.apache.hadoop.hdfs.TestClientProtocolForPipelineRecovery
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.417 sec
    [junit] Running org.apache.hadoop.hdfs.TestCrcCorruption
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 24.048 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.838 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientRetries
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 46.043 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSPermission
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 16.944 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSRemove
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 13.98 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSStartupVersions
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 16.646 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 25.361 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUtil
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.194 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeBlockScanner
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 176.79 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeConfig
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.528 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeDeath
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 121.615 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeRegistration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.966 sec
    [junit] Running org.apache.hadoop.hdfs.TestDecommission
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 32.299 sec
    [junit] Running org.apache.hadoop.hdfs.TestDeprecatedKeys
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.23 sec
    [junit] Running org.apache.hadoop.hdfs.TestDfsOverAvroRpc
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.87 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend4
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.496 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileConcurrentReader
    [junit] Tests run: 7, Failures: 0, Errors: 2, Time elapsed: 16.123 sec
    [junit] Test org.apache.hadoop.hdfs.TestFileConcurrentReader FAILED
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.422 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.EPollArrayWrapper.epollCreate(Native Method)
	at sun.nio.ch.EPollArrayWrapper.<init>(EPollArrayWrapper.java:68)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:52)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Responder.<init>(Server.java:602)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1500)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
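
This failure is file-descriptor exhaustion: every MiniDFSCluster the test spins up opens NIO selectors and sockets, and once the Hudson slave's per-process fd limit is reached, even Selector.open() fails. A minimal standalone sketch (the class name SelectorLeak is invented here, not from the build) that reproduces the same IOException by leaking selectors:

    import java.io.IOException;
    import java.nio.channels.Selector;
    import java.util.ArrayList;
    import java.util.List;

    public class SelectorLeak {
        public static void main(String[] args) {
            List<Selector> leaked = new ArrayList<Selector>();
            try {
                while (true) {
                    // Each open Selector holds kernel file descriptors; none are closed.
                    leaked.add(Selector.open());
                }
            } catch (IOException e) {
                // On Linux this surfaces from epollCreate as "Too many open files",
                // matching the first frame of the stack trace above.
                System.err.println("gave out after " + leaked.size() + " selectors: " + e);
            }
        }
    }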


FAILED:  TEST-org.apache.hadoop.hdfs.TestFileCreationClient.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestFileCreationClient.xml was length 0

FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:615)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1332)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1350)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1403)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:201)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:435)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
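
The "already locked" message points at a leaked lock rather than real disk corruption: each HDFS storage directory is guarded by an exclusive lock on a marker file, and the fd-exhausted run above apparently never released name1's lock, so the next format() is refused. A rough sketch of the mechanism using plain java.nio file locks (the file name in_use.lock and the class name are illustrative, not taken from the build output):

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    public class StorageLockDemo {
        public static void main(String[] args) throws IOException {
            // Stand-in for the marker file a storage directory holds while in use.
            File lockFile = new File("in_use.lock");

            RandomAccessFile owner = new RandomAccessFile(lockFile, "rws");
            FileLock held = owner.getChannel().tryLock();
            System.out.println("first lock acquired: " + (held != null));

            // A second attempt on the same file cannot get the lock. Within one
            // JVM the channel throws; from another process tryLock() returns null.
            RandomAccessFile intruder = new RandomAccessFile(lockFile, "rws");
            try {
                FileLock again = intruder.getChannel().tryLock();
                System.out.println("second lock acquired: " + (again != null));
            } catch (OverlappingFileLockException e) {
                System.out.println("directory is already locked");
            }
        }
    }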


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 5ad6a5df552c34cdc28eafa437bd5174 but expecting ba126587d2ac7280b3267ada10a8ce52

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 5ad6a5df552c34cdc28eafa437bd5174 but expecting ba126587d2ac7280b3267ada10a8ce52
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410u7(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)
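
Here the checkpoint image fails its integrity check: the saved fsimage carries a recorded MD5, and the loader recomputes the digest while streaming the file, aborting on a mismatch. A hedged sketch of that style of check (the class name and argument handling are invented for illustration; only the error wording mirrors FSImage):

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.security.DigestInputStream;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    public class ImageDigestCheck {
        // Usage: ImageDigestCheck <path-to-fsimage> <expected-hex-md5>
        public static void main(String[] args) throws IOException, NoSuchAlgorithmException {
            String path = args[0];
            String expected = args[1];
            MessageDigest md5 = MessageDigest.getInstance("MD5");
            DigestInputStream in = new DigestInputStream(new FileInputStream(path), md5);
            byte[] buf = new byte[8192];
            while (in.read(buf) != -1) {
                // the digest accumulates as the image streams through
            }
            in.close();
            StringBuilder hex = new StringBuilder();
            for (byte b : md5.digest()) {
                hex.append(String.format("%02x", b));
            }
            if (!hex.toString().equals(expected)) {
                throw new IOException("Image file " + path + " is corrupt with MD5 checksum of "
                    + hex + " but expecting " + expected);
            }
        }
    }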




Hadoop-Hdfs-22-branch - Build # 15 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/15/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3325 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.081 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 6.594 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 18.944 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 40.844 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.357 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 223.932 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 421.438 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.495 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 57 minutes 16 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot run program "du": java.io.IOException: error=24, Too many open files

Stack Trace:
java.io.IOException: Cannot run program "du": java.io.IOException: error=24, Too many open files
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:459)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:201)
	at org.apache.hadoop.util.Shell.run(Shell.java:183)
	at org.apache.hadoop.fs.DU.<init>(DU.java:57)
	at org.apache.hadoop.fs.DU.<init>(DU.java:67)
	at org.apache.hadoop.hdfs.server.datanode.FSDataset$FSVolume.<init>(FSDataset.java:342)
	at org.apache.hadoop.hdfs.server.datanode.FSDataset.<init>(FSDataset.java:873)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initFsDataSet(DataNode.java:395)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:500)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
Caused by: java.io.IOException: java.io.IOException: error=24, Too many open files
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)
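
Same descriptor exhaustion as above, surfacing one step earlier: Hadoop's fs.DU sizes each datanode volume by shelling out to du, and forking a child needs free descriptors for its stdio pipes, so a starved JVM fails in ProcessBuilder.start() with error=24. A small sketch of that shell-out (DuProbe is a made-up name; the du -sk invocation follows what org.apache.hadoop.fs.DU appears to run):

    import java.io.BufferedReader;
    import java.io.IOException;
    import java.io.InputStreamReader;

    public class DuProbe {
        public static void main(String[] args) throws IOException, InterruptedException {
            String dir = args.length > 0 ? args[0] : ".";
            ProcessBuilder pb = new ProcessBuilder("du", "-sk", dir);
            pb.redirectErrorStream(true);
            // Forking allocates pipe fds for the child; with none left this
            // throws "Cannot run program \"du\": ... error=24, Too many open files".
            Process p = pb.start();
            BufferedReader out = new BufferedReader(new InputStreamReader(p.getInputStream()));
            String line;
            while ((line = out.readLine()) != null) {
                System.out.println(line);
            }
            p.waitFor();
        }
    }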


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 8bf868156dde47bb957560242860c3ad but expecting 06606af537702d3aa265b44f2db14073

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 8bf868156dde47bb957560242860c3ad but expecting 06606af537702d3aa265b44f2db14073
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410ua(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 14 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/14/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3301 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.198 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 6.388 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 19.541 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 41.847 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.279 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 224.898 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 419.382 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.388 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:745: Tests failed!

Total time: 66 minutes 42 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot run program "du": java.io.IOException: error=24, Too many open files

Stack Trace:
java.io.IOException: Cannot run program "du": java.io.IOException: error=24, Too many open files
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:459)
	at org.apache.hadoop.util.Shell.runCommand(Shell.java:201)
	at org.apache.hadoop.util.Shell.run(Shell.java:183)
	at org.apache.hadoop.fs.DU.<init>(DU.java:57)
	at org.apache.hadoop.fs.DU.<init>(DU.java:67)
	at org.apache.hadoop.hdfs.server.datanode.FSDataset$FSVolume.<init>(FSDataset.java:342)
	at org.apache.hadoop.hdfs.server.datanode.FSDataset.<init>(FSDataset.java:873)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initFsDataSet(DataNode.java:395)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:500)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:315)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.runTestUnfinishedBlockCRCError(TestFileConcurrentReader.java:302)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.__CLR3_0_2wjxr3fsbt(TestFileConcurrentReader.java:290)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite(TestFileConcurrentReader.java:289)
Caused by: java.io.IOException: java.io.IOException: error=24, Too many open files
	at java.lang.UNIXProcess.<init>(UNIXProcess.java:148)
	at java.lang.ProcessImpl.start(ProcessImpl.java:65)
	at java.lang.ProcessBuilder.start(ProcessBuilder.java:452)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of e301c480fdee080214b2eacf36f3e380 but expecting a2a5430c723463a392e0d288a9cb26d9

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of e301c480fdee080214b2eacf36f3e380 but expecting a2a5430c723463a392e0d288a9cb26d9
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410uh(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 13 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/13/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3290 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.028 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 6.164 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 19.446 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 35.52 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.043 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 224.533 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 396.317 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.464 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:736: Tests failed!

Total time: 66 minutes 8 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
4 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.EPollArrayWrapper.epollCreate(Native Method)
	at sun.nio.ch.EPollArrayWrapper.<init>(EPollArrayWrapper.java:68)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:52)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Responder.<init>(Server.java:602)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1501)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:615)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1332)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1350)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1403)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:201)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:435)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


REGRESSION:  org.apache.hadoop.hdfs.server.datanode.TestBlockReport.blockReport_08

Error Message:
Wrong number of PendingReplication blocks expected:<2> but was:<1>

Stack Trace:
junit.framework.AssertionFailedError: Wrong number of PendingReplication blocks expected:<2> but was:<1>
	at org.apache.hadoop.hdfs.server.datanode.TestBlockReport.__CLR3_0_2j2e00j11a4(TestBlockReport.java:414)
	at org.apache.hadoop.hdfs.server.datanode.TestBlockReport.blockReport_08(TestBlockReport.java:390)
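
blockReport_08 fails on a plain count assertion, the flavor of flakiness where replication timing races the check. The "expected:<2> but was:<1>" text is simply JUnit 3's assertEquals formatting, as in this sketch (values hardcoded for illustration; the real test reads the count from the namenode):

    import junit.framework.Assert;

    public class PendingReplicationAssert {
        public static void main(String[] args) {
            int expected = 2;
            int actual = 1;  // hypothetical count observed under test
            // junit.framework.Assert renders the failure exactly as in the trace:
            // "Wrong number of PendingReplication blocks expected:<2> but was:<1>"
            Assert.assertEquals("Wrong number of PendingReplication blocks", expected, actual);
        }
    }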


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of b3f5397d7461d2852f817bbafda0b360 but expecting 133b3c7abc724bacc922bc9669f12038

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of b3f5397d7461d2852f817bbafda0b360 but expecting 133b3c7abc724bacc922bc9669f12038
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410uh(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 12 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/12/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3040 lines...]
    [junit] Running org.apache.hadoop.hdfs.server.common.TestDistributedUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 31.336 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestGetUriFromString
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.139 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestJspHelper
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.831 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockReport
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 50.797 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeMXBean
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.39 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeMetrics
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 28.76 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeVolumeFailure
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.533 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeVolumeFailureReporting
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 72.15 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataXceiver
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.614 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDatanodeRestart
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 5.983 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestReplicasMap
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.085 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestWriteToReplica
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.12 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestAllowFormat
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.258 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlockTokenWithDFS
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 30.934 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlockUnderConstruction
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 3.354 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlocksWithNotEnoughRacks
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.019 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCheckPointForSecurityTokens
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 7.492 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCorruptFilesJsp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.716 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCorruptReplicaInfo
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.228 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestDeadDatanode
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 16.355 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestDecommissioningStatus
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.891 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestEditLogRace
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 30.099 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestFsck
    [junit] Tests run: 8, Failures: 0, Errors: 0, Time elapsed: 53.438 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestHDFSConcat
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.322 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  TEST-org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete.xml was length 0

FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 898dfad64832a6759c34cf099d52efae but expecting 9b985408481219df62f6d22a9b0fbfd5

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 898dfad64832a6759c34cf099d52efae but expecting 9b985408481219df62f6d22a9b0fbfd5
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410u7(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 11 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/11/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3280 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.113 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 7.038 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 19.788 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 39.263 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.267 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 226.803 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 413.289 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.331 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:736: Tests failed!

Total time: 66 minutes 44 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
1 tests failed.
FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of f82ea87df7612a4716e4c7214609435f but expecting bd45d16c9efd5d1cc3d60e950a0c4c83

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of f82ea87df7612a4716e4c7214609435f but expecting bd45d16c9efd5d1cc3d60e950a0c4c83
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410u7(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 10 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/10/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 1019 lines...]
ivy-init-antlib:

ivy-init:
[ivy:configure] :: Ivy 2.1.0 - 20090925235825 :: http://ant.apache.org/ivy/ ::
[ivy:configure] :: loading settings :: file = /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy/ivysettings.xml

ivy-resolve-common:
[ivy:resolve] 
[ivy:resolve] :: problems summary ::
[ivy:resolve] :::: WARNINGS
[ivy:resolve] 		module not found: org.apache.hadoop#hadoop-common;0.22.0-SNAPSHOT
[ivy:resolve] 	==== apache-snapshot: tried
[ivy:resolve] 	  https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.pom
[ivy:resolve] 	  -- artifact org.apache.hadoop#hadoop-common;0.22.0-SNAPSHOT!hadoop-common.jar:
[ivy:resolve] 	  https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.jar
[ivy:resolve] 	==== maven2: tried
[ivy:resolve] 	  http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.pom
[ivy:resolve] 	  -- artifact org.apache.hadoop#hadoop-common;0.22.0-SNAPSHOT!hadoop-common.jar:
[ivy:resolve] 	  http://repo1.maven.org/maven2/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.jar
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		::          UNRESOLVED DEPENDENCIES         ::
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] 		:: org.apache.hadoop#hadoop-common;0.22.0-SNAPSHOT: not found
[ivy:resolve] 		::::::::::::::::::::::::::::::::::::::::::::::
[ivy:resolve] :::: ERRORS
[ivy:resolve] 	SERVER ERROR: Bad Gateway url=https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/maven-metadata.xml
[ivy:resolve] 	SERVER ERROR: Bad Gateway url=https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.pom
[ivy:resolve] 	SERVER ERROR: Bad Gateway url=https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-common/0.22.0-SNAPSHOT/hadoop-common-0.22.0-SNAPSHOT.jar
[ivy:resolve] 
[ivy:resolve] :: USE VERBOSE OR DEBUG MESSAGE LEVEL FOR MORE DETAILS

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:1750: impossible to resolve dependencies:
	resolve failed - see output for details

Total time: 4 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/*.tar.gz': No such file or directory
mv: cannot stat `build/*.jar': No such file or directory
mv: cannot stat `build/test/findbugs': No such file or directory
mv: cannot stat `build/docs/api': No such file or directory
Build Failed
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
No tests ran.

Hadoop-Hdfs-22-branch - Build # 9 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/9/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2892 lines...]
    [junit] Running org.apache.hadoop.fs.loadGenerator.TestLoadGenerator
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 13.04 sec
    [junit] Running org.apache.hadoop.fs.permission.TestStickyBit
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 9.391 sec
    [junit] Running org.apache.hadoop.hdfs.TestBlockMissingException
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.863 sec
    [junit] Running org.apache.hadoop.hdfs.TestByteRangeInputStream
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.147 sec
    [junit] Running org.apache.hadoop.hdfs.TestClientBlockVerification
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 3.016 sec
    [junit] Running org.apache.hadoop.hdfs.TestClientProtocolForPipelineRecovery
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.511 sec
    [junit] Running org.apache.hadoop.hdfs.TestCrcCorruption
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 22.084 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientExcludedNodes
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.948 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSClientRetries
    [junit] Tests run: 5, Failures: 0, Errors: 0, Time elapsed: 47.011 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSPermission
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 16.901 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSRemove
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 14.926 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSStartupVersions
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.986 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 26.901 sec
    [junit] Running org.apache.hadoop.hdfs.TestDFSUtil
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.155 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeBlockScanner
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 247.522 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeConfig
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.57 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeDeath
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 116.198 sec
    [junit] Running org.apache.hadoop.hdfs.TestDatanodeRegistration
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.925 sec
    [junit] Running org.apache.hadoop.hdfs.TestDecommission
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 26.247 sec
    [junit] Running org.apache.hadoop.hdfs.TestDeprecatedKeys
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.269 sec
    [junit] Running org.apache.hadoop.hdfs.TestDfsOverAvroRpc
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 4.122 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileAppend4
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 9.561 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileConcurrentReader
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 24.566 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreation
    [junit] Tests run: 12, Failures: 0, Errors: 0, Time elapsed: 47.717 sec
    [junit] Running org.apache.hadoop.hdfs.TestFileCreationClient
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
2 tests failed.
FAILED:  TEST-org.apache.hadoop.hdfs.TestFileCreationClient.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.TestFileCreationClient.xml was length 0

FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 69cb34dc2f4703f330573600392b95b0 but expecting fcc421ec3420dba57135ac329e68cb61

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 69cb34dc2f4703f330573600392b95b0 but expecting fcc421ec3420dba57135ac329e68cb61
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410tr(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)




Hadoop-Hdfs-22-branch - Build # 8 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/8/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 2338 lines...]
ivy-retrieve-system:

-compile-test-system.wrapper:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/system/test/classes
    [javac] Compiling 1 source file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/system/test/classes

jar-test-system:

-do-jar-test:
      [jar] Building jar: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/system/hadoop-hdfs-instrumented-test-0.22.0-SNAPSHOT.jar
      [jar] Building jar: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/system/hadoop-hdfs-instrumented-test-0.22.0-SNAPSHOT-sources.jar

set-version:
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/ivy

clean-sign:

sign:

signanddeploy:

simpledeploy:
[artifact:install-provider] Installing provider: org.apache.maven.wagon:wagon-http:jar:1.0-beta-2:runtime
[artifact:deploy] Deploying to https://repository.apache.org/content/repositories/snapshots
[artifact:deploy] [INFO] Retrieving previous build number from apache.snapshots.https
[artifact:deploy] Uploading: org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110103.223233-323.jar to apache.snapshots.https
[artifact:deploy] Uploaded 1013K
[artifact:deploy] An error has occurred while processing the Maven artifact tasks.
[artifact:deploy]  Diagnosis:
[artifact:deploy] 
[artifact:deploy] Error deploying artifact 'org.apache.hadoop:hadoop-hdfs:jar': Error deploying artifact: Failed to transfer file: https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110103.223233-323.jar.md5. Return code is: 502

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:1660: Error deploying artifact 'org.apache.hadoop:hadoop-hdfs:jar': Error deploying artifact: Failed to transfer file: https://repository.apache.org/content/repositories/snapshots/org/apache/hadoop/hadoop-hdfs/0.22.0-SNAPSHOT/hadoop-hdfs-0.22.0-20110103.223233-323.jar.md5. Return code is: 502

Total time: 33 minutes 16 seconds


======================================================================
======================================================================
STORE: saving artifacts
======================================================================
======================================================================


mv: cannot stat `build/test/findbugs': No such file or directory
Build Failed
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
No tests ran.

Hadoop-Hdfs-22-branch - Build # 7 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/7/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3555 lines...]

compile-hdfs-test:
   [delete] Deleting directory /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/cache

run-test-hdfs-excluding-commit-and-smoke:
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/data
    [mkdir] Created dir: /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/logs
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
     [copy] Copying 1 file to /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build-fi/test/extraconf
    [junit] WARNING: multiple versions of ant detected in path for junit 
    [junit]          jar:file:/homes/hudson/tools/ant/latest/lib/ant.jar!/org/apache/tools/ant/Project.class
    [junit]      and jar:file:/homes/hudson/.ivy2/cache/ant/ant/jars/ant-1.6.5.jar!/org/apache/tools/ant/Project.class
    [junit] Running org.apache.hadoop.fs.TestFiListPath
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 2.101 sec
    [junit] Running org.apache.hadoop.fs.TestFiRename
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 6.323 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHFlush
    [junit] Tests run: 9, Failures: 0, Errors: 0, Time elapsed: 18.919 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiHftp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 36.642 sec
    [junit] Running org.apache.hadoop.hdfs.TestFiPipelines
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 5.652 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol
    [junit] Tests run: 29, Failures: 0, Errors: 0, Time elapsed: 222.045 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiDataTransferProtocol2
    [junit] Tests run: 10, Failures: 0, Errors: 0, Time elapsed: 428.603 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestFiPipelineClose
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 36.3 sec

checkfailure:

-run-test-hdfs-fault-inject-withtestcaseonly:

run-test-hdfs-fault-inject:

BUILD FAILED
/grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build.xml:736: Tests failed!

Total time: 68 minutes 29 seconds
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
REGRESSION:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransfer

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.EPollArrayWrapper.epollCreate(Native Method)
	at sun.nio.ch.EPollArrayWrapper.<init>(EPollArrayWrapper.java:68)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:52)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Responder.<init>(Server.java:602)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1501)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.

Stack Trace:
java.io.IOException: Cannot lock storage /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/name1. The directory is already locked.
	at org.apache.hadoop.hdfs.server.common.Storage$StorageDirectory.lock(Storage.java:615)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1332)
	at org.apache.hadoop.hdfs.server.namenode.FSImage.format(FSImage.java:1350)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:1403)
	at org.apache.hadoop.hdfs.server.namenode.NameNode.format(NameNode.java:201)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:435)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)
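
This second failure is fallout from the first: the earlier setUp died before tearing its cluster down, so the name directory's lock file was still held when the next test tried to format it. Roughly the pattern behind Storage$StorageDirectory.lock() is sketched below; this is a simplification by the editor (the real method keeps more state), shown only to make the "already locked" message concrete:

    import java.io.File;
    import java.io.IOException;
    import java.io.RandomAccessFile;
    import java.nio.channels.FileLock;
    import java.nio.channels.OverlappingFileLockException;

    // Simplified sketch: each storage directory holds an in_use.lock file,
    // and failing to acquire it means some NameNode instance -- here, the
    // cluster the previous test never shut down -- still owns the directory.
    public class StorageLockSketch {
        public static FileLock lock(File storageDir) throws IOException {
            RandomAccessFile file =
                new RandomAccessFile(new File(storageDir, "in_use.lock"), "rws");
            FileLock lock;
            try {
                lock = file.getChannel().tryLock(); // null: held by another process
            } catch (OverlappingFileLockException e) {
                lock = null;                        // holder is in this same JVM
            }
            if (lock == null) {
                file.close();
                throw new IOException("Cannot lock storage " + storageDir
                    + ". The directory is already locked.");
            }
            return lock;
        }
    }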


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 557ee89b88130e4bda193a26e45b995e but expecting 246f78d05d6150e4396e213e30ebb16e

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 557ee89b88130e4bda193a26e45b995e but expecting 246f78d05d6150e4396e213e30ebb16e
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410tl(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)
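
TestStorageRestore fails the checkpoint because the fsimage the secondary merged does not hash to the digest recorded when the image was saved: loadFSImage streams the file through MD5 and compares against the stored checksum. The verification amounts to the following sketch (names are the editor's; the real check lives inside FSImage.loadFSImage). The same mismatch, with different digests, shows up again in the build #6 report below.

    import java.io.FileInputStream;
    import java.io.IOException;
    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;

    // Sketch of the check that produced the message above: recompute an MD5
    // over the image file and fail if it differs from the digest recorded
    // at save time.
    public class ImageDigestCheck {
        public static String md5Hex(String path)
                throws IOException, NoSuchAlgorithmException {
            MessageDigest md = MessageDigest.getInstance("MD5");
            FileInputStream in = new FileInputStream(path);
            try {
                byte[] buf = new byte[8192];
                int n;
                while ((n = in.read(buf)) != -1) {
                    md.update(buf, 0, n);
                }
            } finally {
                in.close();
            }
            StringBuilder hex = new StringBuilder();
            for (byte b : md.digest()) {
                hex.append(String.format("%02x", b));
            }
            return hex.toString();
        }

        public static void verify(String path, String expected)
                throws IOException, NoSuchAlgorithmException {
            String actual = md5Hex(path);
            if (!actual.equals(expected)) {
                throw new IOException("Image file " + path
                    + " is corrupt with MD5 checksum of " + actual
                    + " but expecting " + expected);
            }
        }
    }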




Hadoop-Hdfs-22-branch - Build # 6 - Still Failing

Posted by Apache Hudson Server <hu...@hudson.apache.org>.
See https://hudson.apache.org/hudson/job/Hadoop-Hdfs-22-branch/6/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 3003 lines...]
    [junit] Running org.apache.hadoop.hdfs.server.common.TestDistributedUpgrade
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 31.453 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestGetUriFromString
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.151 sec
    [junit] Running org.apache.hadoop.hdfs.server.common.TestJspHelper
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 0.79 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestBlockReport
    [junit] Tests run: 7, Failures: 0, Errors: 0, Time elapsed: 49.927 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeMXBean
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.429 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeMetrics
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 29.054 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeVolumeFailure
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 3.484 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataNodeVolumeFailureReporting
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 71.635 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDataXceiver
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.557 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestDatanodeRestart
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 5.804 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestReplicasMap
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 0.085 sec
    [junit] Running org.apache.hadoop.hdfs.server.datanode.TestWriteToReplica
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 5.204 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestAllowFormat
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 2.273 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlockTokenWithDFS
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 35.492 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlockUnderConstruction
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 3.377 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestBlocksWithNotEnoughRacks
    [junit] Tests run: 2, Failures: 0, Errors: 0, Time elapsed: 7.921 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCheckPointForSecurityTokens
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 7.338 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCorruptFilesJsp
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 8.212 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestCorruptReplicaInfo
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 0.228 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestDeadDatanode
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 16.29 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestDecommissioningStatus
    [junit] Tests run: 1, Failures: 0, Errors: 0, Time elapsed: 17.879 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestEditLogRace
    [junit] Tests run: 4, Failures: 0, Errors: 0, Time elapsed: 30.382 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestFsck
    [junit] Tests run: 8, Failures: 0, Errors: 0, Time elapsed: 34.835 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestHDFSConcat
    [junit] Tests run: 3, Failures: 0, Errors: 0, Time elapsed: 6.63 sec
    [junit] Running org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete
Build timed out. Aborting
[FINDBUGS] Skipping publisher since build result is FAILURE
Publishing Javadoc
Archiving artifacts
Recording test results
Recording fingerprints
Publishing Clover coverage report...
No Clover report will be published due to a Build Failure
Email was triggered for: Failure
Sending email for trigger: Failure



###################################################################################
############################## FAILED TESTS (if any) ##############################
3 tests failed.
FAILED:  TEST-org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete.xml.<init>

Error Message:


Stack Trace:
Test report file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/TEST-org.apache.hadoop.hdfs.server.namenode.TestLargeDirectoryDelete.xml was length 0
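
A zero-length TEST-*.xml is the signature of the timeout earlier in this console: TestLargeDirectoryDelete was the suite running when "Build timed out. Aborting" fired, so the forked JVM was killed before the JUnit XML formatter could flush, and Hudson then counts the empty report as a failure even though no assertion failed. A small, hypothetical helper (editor's sketch, not part of the build) for telling such aborted suites apart from genuine failures:

    import java.io.File;
    import java.io.FilenameFilter;

    // Hypothetical triage helper: a JUnit report of length 0 means the
    // forked JVM died before the XML formatter flushed, i.e. the suite was
    // aborted rather than failing an assertion.
    public class EmptyReportScan {
        public static void main(String[] args) {
            File dir = new File(args.length > 0 ? args[0] : "build/test");
            File[] reports = dir.listFiles(new FilenameFilter() {
                public boolean accept(File d, String name) {
                    return name.startsWith("TEST-") && name.endsWith(".xml");
                }
            });
            if (reports == null) {
                return; // directory missing: the build never reached the tests
            }
            for (File report : reports) {
                if (report.length() == 0) {
                    System.out.println("aborted suite (empty report): "
                        + report.getName());
                }
            }
        }
    }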

FAILED:  org.apache.hadoop.hdfs.TestFileConcurrentReader.testUnfinishedBlockCRCErrorNormalTransferVerySmallWrite

Error Message:
Too many open files

Stack Trace:
java.io.IOException: Too many open files
	at sun.nio.ch.IOUtil.initPipe(Native Method)
	at sun.nio.ch.EPollSelectorImpl.<init>(EPollSelectorImpl.java:49)
	at sun.nio.ch.EPollSelectorProvider.openSelector(EPollSelectorProvider.java:18)
	at java.nio.channels.Selector.open(Selector.java:209)
	at org.apache.hadoop.ipc.Server$Listener.<init>(Server.java:318)
	at org.apache.hadoop.ipc.Server.<init>(Server.java:1492)
	at org.apache.hadoop.ipc.RPC$Server.<init>(RPC.java:394)
	at org.apache.hadoop.ipc.WritableRpcEngine$Server.<init>(WritableRpcEngine.java:331)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:291)
	at org.apache.hadoop.ipc.WritableRpcEngine.getServer(WritableRpcEngine.java:47)
	at org.apache.hadoop.ipc.RPC.getServer(RPC.java:382)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.initIpcServer(DataNode.java:416)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.startDataNode(DataNode.java:507)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:281)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.<init>(DataNode.java:263)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.makeInstance(DataNode.java:1561)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1504)
	at org.apache.hadoop.hdfs.server.datanode.DataNode.instantiateDataNode(DataNode.java:1471)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:614)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:448)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:176)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:71)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:168)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.init(TestFileConcurrentReader.java:88)
	at org.apache.hadoop.hdfs.TestFileConcurrentReader.setUp(TestFileConcurrentReader.java:73)


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore

Error Message:
Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 053d4feec1ab8d119cef4bbd1e058db0 but expecting 8dfbd8c71b1fe8e72a0d139dc66143fb

Stack Trace:
java.io.IOException: Image file /grid/0/hudson/hudson-slave/workspace/Hadoop-Hdfs-22-branch/trunk/build/test/data/dfs/secondary/current/fsimage is corrupt with MD5 checksum of 053d4feec1ab8d119cef4bbd1e058db0 but expecting 8dfbd8c71b1fe8e72a0d139dc66143fb
	at org.apache.hadoop.hdfs.server.namenode.FSImage.loadFSImage(FSImage.java:1062)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.doMerge(SecondaryNameNode.java:678)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode$CheckpointStorage.access$500(SecondaryNameNode.java:583)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doMerge(SecondaryNameNode.java:460)
	at org.apache.hadoop.hdfs.server.namenode.SecondaryNameNode.doCheckpoint(SecondaryNameNode.java:424)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.__CLR3_0_2dn2tm410tm(TestStorageRestore.java:316)
	at org.apache.hadoop.hdfs.server.namenode.TestStorageRestore.testStorageRestore(TestStorageRestore.java:286)