Posted to hdfs-dev@hadoop.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2015/11/25 10:37:10 UTC

Hadoop-Hdfs-trunk - Build # 2578 - Still Failing

See https://builds.apache.org/job/Hadoop-Hdfs-trunk/2578/

###################################################################################
########################## LAST 60 LINES OF THE CONSOLE ###########################
[...truncated 8033 lines...]
[INFO] Executing tasks

main:
    [mkdir] Created dir: /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/target/test-dir
[INFO] Executed tasks
[INFO] 
[INFO] --- maven-source-plugin:2.3:jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-source-plugin:2.3:test-jar-no-fork (hadoop-java-sources) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (dist-enforce) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-site-plugin:3.4:attach-descriptor (attach-descriptor) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-javadoc-plugin:2.8.1:jar (module-javadocs) @ hadoop-hdfs-project ---
[INFO] Skipping javadoc generation
[INFO] 
[INFO] --- maven-enforcer-plugin:1.3.1:enforce (depcheck) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- maven-checkstyle-plugin:2.15:checkstyle (default-cli) @ hadoop-hdfs-project ---
[INFO] 
[INFO] --- findbugs-maven-plugin:3.0.0:findbugs (default-cli) @ hadoop-hdfs-project ---
[INFO] ------------------------------------------------------------------------
[INFO] Reactor Summary:
[INFO] 
[INFO] Apache Hadoop HDFS Client ......................... SUCCESS [08:19 min]
[INFO] Apache Hadoop HDFS ................................ FAILURE [  05:05 h]
[INFO] Apache Hadoop HDFS Native Client .................. SKIPPED
[INFO] Apache Hadoop HttpFS .............................. SKIPPED
[INFO] Apache Hadoop HDFS BookKeeper Journal ............. SKIPPED
[INFO] Apache Hadoop HDFS-NFS ............................ SKIPPED
[INFO] Apache Hadoop HDFS Project ........................ SUCCESS [  0.143 s]
[INFO] ------------------------------------------------------------------------
[INFO] BUILD FAILURE
[INFO] ------------------------------------------------------------------------
[INFO] Total time: 05:13 h
[INFO] Finished at: 2015-11-25T09:36:41+00:00
[INFO] Final Memory: 57M/835M
[INFO] ------------------------------------------------------------------------
[ERROR] Failed to execute goal org.apache.maven.plugins:maven-surefire-plugin:2.17:test (default-test) on project hadoop-hdfs: There are test failures.
[ERROR] 
[ERROR] Please refer to /home/jenkins/jenkins-slave/workspace/Hadoop-Hdfs-trunk/hadoop-hdfs-project/hadoop-hdfs/target/surefire-reports for the individual test results.
[ERROR] -> [Help 1]
[ERROR] 
[ERROR] To see the full stack trace of the errors, re-run Maven with the -e switch.
[ERROR] Re-run Maven using the -X switch to enable full debug logging.
[ERROR] 
[ERROR] For more information about the errors and possible solutions, please read the following articles:
[ERROR] [Help 1] http://cwiki.apache.org/confluence/display/MAVEN/MojoFailureException
[ERROR] 
[ERROR] After correcting the problems, you can resume the build with the command
[ERROR]   mvn <goals> -rf :hadoop-hdfs
Build step 'Execute shell' marked build as failure
Archiving artifacts
Recording test results
Updating HADOOP-12415
Sending e-mails to: hdfs-dev@hadoop.apache.org
Email was triggered for: Failure - Any
Sending email for trigger: Failure - Any
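
Note: the [ERROR] block above is Maven's standard resume hint. To reproduce only the failing module's tests locally, one option is to combine the resume flag with surefire's test filter (illustrative; the -Dtest value names just one of the failed classes below, and the others can be appended comma-separated):

    mvn test -rf :hadoop-hdfs -Dtest=TestPipelines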



###################################################################################
############################## FAILED TESTS (if any) ##############################
8 tests failed.
FAILED:  org.apache.hadoop.hdfs.TestPipelines.pipeline_01

Error Message:
org/apache/hadoop/security/proto/SecurityProtos$CancelDelegationTokenRequestProto$1

Stack Trace:
java.lang.NoClassDefFoundError: org/apache/hadoop/security/proto/SecurityProtos$CancelDelegationTokenRequestProto$1
	at java.net.URLClassLoader$1.run(URLClassLoader.java:366)
	at java.net.URLClassLoader$1.run(URLClassLoader.java:355)
	at java.security.AccessController.doPrivileged(Native Method)
	at java.net.URLClassLoader.findClass(URLClassLoader.java:354)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:425)
	at sun.misc.Launcher$AppClassLoader.loadClass(Launcher.java:308)
	at java.lang.ClassLoader.loadClass(ClassLoader.java:358)
	at org.apache.hadoop.security.proto.SecurityProtos$CancelDelegationTokenRequestProto.<clinit>(SecurityProtos.java:3095)
	at java.lang.Class.forName0(Native Method)
	at java.lang.Class.forName(Class.java:190)
	at com.sun.proxy.$Proxy21.<clinit>(Unknown Source)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance0(Native Method)
	at sun.reflect.NativeConstructorAccessorImpl.newInstance(NativeConstructorAccessorImpl.java:57)
	at sun.reflect.DelegatingConstructorAccessorImpl.newInstance(DelegatingConstructorAccessorImpl.java:45)
	at java.lang.reflect.Constructor.newInstance(Constructor.java:526)
	at java.lang.reflect.Proxy.newInstance(Proxy.java:748)
	at java.lang.reflect.Proxy.newProxyInstance(Proxy.java:739)
	at org.apache.hadoop.ipc.ProtobufRpcEngine.getProxy(ProtobufRpcEngine.java:104)
	at org.apache.hadoop.ipc.RPC.getProtocolProxy(RPC.java:581)
	at org.apache.hadoop.hdfs.NameNodeProxiesClient.createNonHAProxyWithClientProtocol(NameNodeProxiesClient.java:343)
	at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:131)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:339)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:283)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:274)
	at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:266)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2443)
	at org.apache.hadoop.hdfs.MiniDFSCluster.waitActive(MiniDFSCluster.java:2489)
	at org.apache.hadoop.hdfs.MiniDFSCluster.startDataNodes(MiniDFSCluster.java:1632)
	at org.apache.hadoop.hdfs.MiniDFSCluster.initMiniDFSCluster(MiniDFSCluster.java:844)
	at org.apache.hadoop.hdfs.MiniDFSCluster.<init>(MiniDFSCluster.java:482)
	at org.apache.hadoop.hdfs.MiniDFSCluster$Builder.build(MiniDFSCluster.java:441)
	at org.apache.hadoop.hdfs.TestPipelines.startUpCluster(TestPipelines.java:62)
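
Note: a NoClassDefFoundError on an inner class of a generated protobuf message (SecurityProtos here) usually points at a stale or mismatched hadoop-common artifact on the test classpath rather than a defect in the test itself. A minimal standalone probe, reusing the class name from the trace above (a diagnostic sketch, not part of the test suite):

    // Probe.java: checks whether the generated protobuf class resolves on the
    // current classpath. ClassNotFoundException means the containing jar is
    // absent; NoClassDefFoundError during linking suggests a version mismatch.
    public class Probe {
        public static void main(String[] args) {
            String name = "org.apache.hadoop.security.proto.SecurityProtos"
                    + "$CancelDelegationTokenRequestProto";
            try {
                Class.forName(name);
                System.out.println(name + " resolved");
            } catch (ClassNotFoundException | NoClassDefFoundError e) {
                System.out.println(name + " failed to load: " + e);
            }
        }
    }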


FAILED:  org.apache.hadoop.hdfs.server.blockmanagement.TestReplicationPolicy.testChooseTargetWithMoreThanAvailableNodesWithStaleness[0]

Error Message:
null

Stack Trace:
java.lang.AssertionError: null
	at org.junit.Assert.fail(Assert.java:86)
	at org.junit.Assert.assertTrue(Assert.java:41)
	at org.junit.Assert.assertTrue(Assert.java:52)
	at org.apache.hadoop.hdfs.server.blockmanagement.TestReplicationPolicy.testChooseTargetWithMoreThanAvailableNodes(TestReplicationPolicy.java:517)
	at org.apache.hadoop.hdfs.server.blockmanagement.TestReplicationPolicy.testChooseTargetWithMoreThanAvailableNodesWithStaleness(TestReplicationPolicy.java:479)
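
Note: the bare "null" error message is what JUnit reports when the single-argument assertTrue(boolean) fails, since that overload carries no failure message; the Assert.java:52 -> 41 -> fail chain in the trace matches that form. For comparison (illustrative; the condition name is hypothetical):

    // Message-less form, as used here; fails as "java.lang.AssertionError: null"
    org.junit.Assert.assertTrue(enoughTargetsChosen);
    // The two-argument overload would have kept context in this report:
    org.junit.Assert.assertTrue("expected enough targets to be chosen", enoughTargetsChosen);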


FAILED:  org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.TestLazyPersistReplicaRecovery.testDnRestartWithSavedReplicas

Error Message:

Expected: is <DISK>
     but: was <RAM_DISK>

Stack Trace:
java.lang.AssertionError: 
Expected: is <DISK>
     but: was <RAM_DISK>
	at org.hamcrest.MatcherAssert.assertThat(MatcherAssert.java:20)
	at org.junit.Assert.assertThat(Assert.java:865)
	at org.junit.Assert.assertThat(Assert.java:832)
	at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.LazyPersistTestCase.ensureFileReplicasOnStorageType(LazyPersistTestCase.java:141)
	at org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.TestLazyPersistReplicaRecovery.testDnRestartWithSavedReplicas(TestLazyPersistReplicaRecovery.java:53)
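
Note: the two-line "Expected: is <DISK> / but: was <RAM_DISK>" message is Hamcrest's mismatch rendering; the check in LazyPersistTestCase is roughly of this form (illustrative sketch only, variable name hypothetical):

    // Produces "Expected: is <DISK> but: was <RAM_DISK>" when a replica has not
    // yet been lazily persisted off the RAM disk after the DataNode restart.
    assertThat(replicaStorageType, is(StorageType.DISK));

So the saved replica was still being reported from RAM_DISK when the assertion ran.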


FAILED:  org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.testExceedsCapacity

Error Message:
Pending cached list of 127.0.0.1:57670 is not empty, [{blockId=1073741841, replication=1, mark=true}]

Stack Trace:
java.lang.AssertionError: Pending cached list of 127.0.0.1:57670 is not empty, [{blockId=1073741841, replication=1, mark=true}]
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.assertTrue(Assert.java:41)
	at org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.checkPendingCachedEmpty(TestCacheDirectives.java:1479)
	at org.apache.hadoop.hdfs.server.namenode.TestCacheDirectives.testExceedsCapacity(TestCacheDirectives.java:1502)


FAILED:  org.apache.hadoop.security.TestPermission.testBackwardCompatibility

Error Message:
expected:<18> but was:<0>

Stack Trace:
java.lang.AssertionError: expected:<18> but was:<0>
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotEquals(Assert.java:743)
	at org.junit.Assert.assertEquals(Assert.java:118)
	at org.junit.Assert.assertEquals(Assert.java:555)
	at org.junit.Assert.assertEquals(Assert.java:542)
	at org.apache.hadoop.security.TestPermission.testBackwardCompatibility(TestPermission.java:98)
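
Note: "expected:<18> but was:<0>" is JUnit's standard assertEquals mismatch rendering (the TestTracing failure below has the same form). Illustrative only, with a hypothetical variable name:

    // Fails as "java.lang.AssertionError: expected:<18> but was:<0>"
    org.junit.Assert.assertEquals(18, actualValue);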


FAILED:  org.apache.hadoop.tracing.TestTracing.testTracing

Error Message:
expected:<-8727621675617939615> but was:<-4477541422191008168>

Stack Trace:
java.lang.AssertionError: expected:<-8727621675617939615> but was:<-4477541422191008168>
	at org.junit.Assert.fail(Assert.java:88)
	at org.junit.Assert.failNotEquals(Assert.java:743)
	at org.junit.Assert.assertEquals(Assert.java:118)
	at org.junit.Assert.assertEquals(Assert.java:555)
	at org.junit.Assert.assertEquals(Assert.java:542)
	at org.apache.hadoop.tracing.TestTracing.readWithTracing(TestTracing.java:177)
	at org.apache.hadoop.tracing.TestTracing.testTracing(TestTracing.java:80)


FAILED:  org.apache.hadoop.hdfs.TestEncryptionZones.testStartFileRetry

Error Message:
test timed out after 120000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 120000 milliseconds
	at sun.misc.Unsafe.park(Native Method)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.parkAndCheckInterrupt(AbstractQueuedSynchronizer.java:834)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.doAcquireSharedInterruptibly(AbstractQueuedSynchronizer.java:994)
	at java.util.concurrent.locks.AbstractQueuedSynchronizer.acquireSharedInterruptibly(AbstractQueuedSynchronizer.java:1303)
	at java.util.concurrent.CountDownLatch.await(CountDownLatch.java:236)
	at org.apache.hadoop.hdfs.TestEncryptionZones.testStartFileRetry(TestEncryptionZones.java:1053)
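
Note: the 120000 ms limit is JUnit's per-test timeout, declared on the test method itself; the trace shows the test thread parked on CountDownLatch.await, which never released before the deadline. The annotation form (illustrative) is:

    @Test(timeout = 120000)  // JUnit fails the test after two minutes
    public void testStartFileRetry() throws Exception { /* ... */ }

The 300000 ms (five minute) timeout in TestDirectoryScanner.testThrottling below comes from the same mechanism.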


FAILED:  org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.testThrottling

Error Message:
test timed out after 300000 milliseconds

Stack Trace:
java.lang.Exception: test timed out after 300000 milliseconds
	at sun.misc.Unsafe.park(Native Method)
	at java.util.concurrent.locks.LockSupport.park(LockSupport.java:186)
	at java.util.concurrent.FutureTask.awaitDone(FutureTask.java:425)
	at java.util.concurrent.FutureTask.get(FutureTask.java:187)
	at org.apache.hadoop.hdfs.server.datanode.DirectoryScanner.getDiskReport(DirectoryScanner.java:731)
	at org.apache.hadoop.hdfs.server.datanode.DirectoryScanner.scan(DirectoryScanner.java:581)
	at org.apache.hadoop.hdfs.server.datanode.DirectoryScanner.reconcile(DirectoryScanner.java:562)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.scan(TestDirectoryScanner.java:307)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.scan(TestDirectoryScanner.java:301)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.runThrottleTest(TestDirectoryScanner.java:760)
	at org.apache.hadoop.hdfs.server.datanode.TestDirectoryScanner.testThrottling(TestDirectoryScanner.java:606)