You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kafka.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/08/17 00:20:26 UTC
Build failed in Jenkins: Kafka-0.8 #22
See <https://builds.apache.org/job/Kafka-0.8/22/changes>
Changes:
[jjkoshy] KAFKA-385 Fix race condition between checkSatisfied and expire in RequestPurgatory; fixed constant expiration of follower fetch requests as checkSatisfied was not getting called; add metrics to the RequestPurgatory; add a KafkaTimer convenience class; patched by Joel Koshy; reviewed by Jun Rao and Jay Kreps.
------------------------------------------
[...truncated 4348 lines...]
[0m[[0minfo[0m] [0mTest Starting: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Starting: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Passed: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRolls[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.BackwardsCompatibilityTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProtocolVersion0(kafka.integration.BackwardsCompatibilityTest)[0m
[2012-08-16 22:19:29,705] WARN Exception causing close of session 0x0 due to java.io.IOException: ZooKeeperServer not running (org.apache.zookeeper.server.NIOServerCnxn:639)
[0m[[0minfo[0m] [0mTest Passed: testProtocolVersion0(kafka.integration.BackwardsCompatibilityTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.BackwardsCompatibilityTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[2012-08-16 22:19:31,601] ERROR Closing socket for /127.0.0.1 because of error (kafka.network.Processor:99)
java.io.IOException: Connection reset by peer
at sun.nio.ch.FileDispatcher.read0(Native Method)
at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
at sun.nio.ch.IOUtil.read(IOUtil.java:171)
at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
at kafka.utils.Utils$.read(Utils.scala:631)
at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
at kafka.network.Processor.read(SocketServer.scala:296)
at kafka.network.Processor.run(SocketServer.scala:212)
at java.lang.Thread.run(Thread.java:662)
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
Offsets = 240,216,108,0
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.FetcherTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFetcher(kafka.consumer.FetcherTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetcher(kafka.consumer.FetcherTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.FetcherTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[0m[[0minfo[0m] [0mTest Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Passed: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Starting: testChecksum[0m
[0m[[0minfo[0m] [0mTest Passed: testChecksum[0m
[0m[[0minfo[0m] [0mTest Starting: testEquality[0m
[0m[[0minfo[0m] [0mTest Passed: testEquality[0m
[0m[[0minfo[0m] [0mTest Starting: testIsHashable[0m
[0m[[0minfo[0m] [0mTest Passed: testIsHashable[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConfigurablePartitions(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConfigurablePartitions(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSingleMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSingleMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressedMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressedMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceRequestForUnknownTopic(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceRequestForUnknownTopic(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.controller.ControllerBasicTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testControllerFailOver(kafka.controller.ControllerBasicTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testControllerFailOver(kafka.controller.ControllerBasicTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testControllerCommandSend(kafka.controller.ControllerBasicTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testControllerCommandSend(kafka.controller.ControllerBasicTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.controller.ControllerBasicTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_c49d0162[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 140, Failed 1, Errors 0, Passed 139, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== java-examples / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== hadoop consumer / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== hadoop producer / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[31merror[0m] [0mError running kafka.producer.AsyncProducerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 195 s, completed Aug 16, 2012 10:20:24 PM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 195 s, completed Aug 16, 2012 10:20:24 PM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Jenkins build is back to normal : Kafka-0.8 #26
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/26/changes>
Build failed in Jenkins: Kafka-0.8 #25
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/25/changes>
Changes:
[junrao] failed ERROR messages in LazyInitProducerTest; patched by Yang Ye; reviewed by Jun Rao; kafka-467
------------------------------------------
[...truncated 1026 lines...]
[0m[[0minfo[0m] [0mTest Starting: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.BackwardsCompatibilityTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProtocolVersion0(kafka.integration.BackwardsCompatibilityTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProtocolVersion0(kafka.integration.BackwardsCompatibilityTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.BackwardsCompatibilityTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Starting: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Passed: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRolls[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Passed: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testIterator[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testEqualsWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testEqualsWithCompression[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Passed: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Starting: testChecksum[0m
[0m[[0minfo[0m] [0mTest Passed: testChecksum[0m
[0m[[0minfo[0m] [0mTest Starting: testEquality[0m
[0m[[0minfo[0m] [0mTest Passed: testEquality[0m
[0m[[0minfo[0m] [0mTest Starting: testIsHashable[0m
[0m[[0minfo[0m] [0mTest Passed: testIsHashable[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[31merror[0m] [0mTest Failed: testSendToNewTopic(kafka.producer.ProducerTest)[0m
java.lang.AssertionError: Message set should not have any more messages
at org.junit.Assert.fail(Assert.java:69)
at org.junit.Assert.assertTrue(Assert.java:32)
at org.junit.Assert.assertFalse(Assert.java:51)
at kafka.producer.ProducerTest.testSendToNewTopic(ProducerTest.scala:183)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [0mTest Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testFileSize[0m
[0m[[0minfo[0m] [0mTest Passed: testFileSize[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Starting: testRead[0m
[0m[[0minfo[0m] [0mTest Passed: testRead[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 140, Failed 1, Errors 0, Passed 139, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_284e8f86[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.producer.ProducerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 202 s, completed Aug 18, 2012 6:05:19 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 203 s, completed Aug 18, 2012 6:05:19 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #24
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/24/changes>
Changes:
[junrao] KafkaController NPE in SessionExpireListener; patched by Yang Ye; reviewed by Jun Rao, Neha Narkhede; KAFKA-464
------------------------------------------
[...truncated 5638 lines...]
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-08-18 00:41:51,849] WARN Session 0x139372cb9020003 for server null, unexpected error, closing socket connection and attempting reconnect (org.apache.zookeeper.ClientCnxn:1188)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:1071)
at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1104)
[2012-08-18 00:41:51,849] WARN Session 0x139372d09690002 for server null, unexpected error, closing socket connection and attempting reconnect (org.apache.zookeeper.ClientCnxn:1188)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:1071)
at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1104)
[2012-08-18 00:41:51,848] WARN Session 0x139372b1d270002 for server null, unexpected error, closing socket connection and attempting reconnect (org.apache.zookeeper.ClientCnxn:1188)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:1071)
at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1104)
[2012-08-18 00:41:51,848] WARN Session 0x139372d123f0002 for server null, unexpected error, closing socket connection and attempting reconnect (org.apache.zookeeper.ClientCnxn:1188)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:1071)
at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1104)
[2012-08-18 00:41:51,848] WARN Session 0x139372d08910002 for server null, unexpected error, closing socket connection and attempting reconnect (org.apache.zookeeper.ClientCnxn:1188)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at org.apache.zookeeper.ClientCnxn$SendThread.startConnect(ClientCnxn.java:1071)
at org.apache.zookeeper.ClientCnxn$SendThread.run(ClientCnxn.java:1104)
[2012-08-18 00:41:51,868] ERROR Connection attempt to localhost:47213 failed, next attempt in 100 ms (kafka.producer.SyncProducer:99)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:161)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:182)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:116)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:86)
at kafka.producer.async.DefaultEventHandler$$anonfun$handle$1.apply$mcV$sp(DefaultEventHandler.scala:53)
at kafka.utils.Utils$.swallow(Utils.scala:415)
at kafka.utils.Logging$class.swallowError(Logging.scala:102)
at kafka.utils.Utils$.swallowError(Utils.scala:40)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:53)
at kafka.producer.AsyncProducerTest.testFailedSendRetryLogic(AsyncProducerTest.scala:438)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-08-18 00:41:51,978] ERROR Producer connection to localhost:47213 timing out after 5000 ms (kafka.producer.SyncProducer:99)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:161)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:182)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:116)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:86)
at kafka.producer.async.DefaultEventHandler$$anonfun$handle$1.apply$mcV$sp(DefaultEventHandler.scala:53)
at kafka.utils.Utils$.swallow(Utils.scala:415)
at kafka.utils.Logging$class.swallowError(Logging.scala:102)
at kafka.utils.Utils$.swallowError(Utils.scala:40)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:53)
at kafka.producer.AsyncProducerTest.testFailedSendRetryLogic(AsyncProducerTest.scala:438)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-08-18 00:41:51,979] ERROR fetching broker partition metadata for topics [ListBuffer(topic1)] from broker [ArrayBuffer(id:0,creatorId:localhost-1345250491862,host:localhost,port:47213)] failed (kafka.utils.Utils$:102)
kafka.common.KafkaException: fetching broker partition metadata for topics [ListBuffer(topic1)] from broker [ArrayBuffer(id:0,creatorId:localhost-1345250491862,host:localhost,port:47213)] failed
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:112)
at kafka.producer.async.DefaultEventHandler$$anonfun$handle$1.apply$mcV$sp(DefaultEventHandler.scala:53)
at kafka.utils.Utils$.swallow(Utils.scala:415)
at kafka.utils.Logging$class.swallowError(Logging.scala:102)
at kafka.utils.Utils$.swallowError(Utils.scala:40)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:53)
at kafka.producer.AsyncProducerTest.testFailedSendRetryLogic(AsyncProducerTest.scala:438)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:507)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:161)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:182)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:116)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:86)
... 41 more
[0m[[0minfo[0m] [0mTest Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[0m[[0minfo[0m] [0mTest Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /var/tmp/sbt_f0ab0172[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 140, Failed 2, Errors 0, Passed 138, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.message.CompressionUtilTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.producer.AsyncProducerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 289 s, completed Aug 18, 2012 12:41:57 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 290 s, completed Aug 18, 2012 12:41:57 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Re: Build failed in Jenkins: Kafka-0.8 #23
Posted by Jay Kreps <ja...@gmail.com>.
It looks like these problems come from the tests not being reentrant due to
using hard-coded ports. Hard-coding ports is a problem because our tests
may run at the same time for different branches or may run on the same test
server as other tests that are using that port.
There is no reason to use a hard-coded port, as we have a utility function
that will find a set of free ports for you:
TestUtils.choosePort // to get one free port
or
TestUtils.choosePorts(5) // to get 5 free ports
-Jay
On Fri, Aug 17, 2012 at 3:08 PM, Apache Jenkins Server <
jenkins@builds.apache.org> wrote:
> See <https://builds.apache.org/job/Kafka-0.8/23/changes>
>
> Changes:
>
> [junrao] enforce broker.id to be a non-negative integer; patched by
> Swapnil Ghike; reviewed by Jun Rao, Neha Narkhede; KAFKA-424
>
> ------------------------------------------
> [...truncated 3791 lines...]
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
> at
> org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
> at sbt.TestRunner.run(TestFramework.scala:53)
> at sbt.TestRunner.runTest$1(TestFramework.scala:67)
> at sbt.TestRunner.run(TestFramework.scala:76)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at sbt.NamedTestTask.run(TestFramework.scala:92)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
> at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
> at sbt.impl.RunTask.runTask(RunTask.scala:85)
> at
> sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at sbt.Control$.trapUnit(Control.scala:19)
> at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
> [0m[ [0minfo [0m] [0mTest Starting:
> testMultiProduce(kafka.integration.LazyInitProducerTest) [0m
> [0m[ [31merror [0m] [0mTest Failed:
> testMultiProduce(kafka.integration.LazyInitProducerTest) [0m
> java.net.BindException: Address already in use
> at sun.nio.ch.Net.bind(Native Method)
> at
> sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
> at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
> at
> kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
> at
> kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
> at junit.framework.TestCase.runBare(TestCase.java:128)
> at junit.framework.TestResult$1.protect(TestResult.java:110)
> at junit.framework.TestResult.runProtected(TestResult.java:128)
> at junit.framework.TestResult.run(TestResult.java:113)
> at junit.framework.TestCase.run(TestCase.java:120)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
> at
> org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
> at sbt.TestRunner.run(TestFramework.scala:53)
> at sbt.TestRunner.runTest$1(TestFramework.scala:67)
> at sbt.TestRunner.run(TestFramework.scala:76)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at sbt.NamedTestTask.run(TestFramework.scala:92)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
> at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
> at sbt.impl.RunTask.runTask(RunTask.scala:85)
> at
> sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at sbt.Control$.trapUnit(Control.scala:19)
> at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
> [0m[ [0minfo [0m] [0mTest Starting:
> testProduceAndFetch(kafka.integration.LazyInitProducerTest) [0m
> [0m[ [31merror [0m] [0mTest Failed:
> testProduceAndFetch(kafka.integration.LazyInitProducerTest) [0m
> java.net.BindException: Address already in use
> at sun.nio.ch.Net.bind(Native Method)
> at
> sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
> at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
> at
> kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
> at
> kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
> at junit.framework.TestCase.runBare(TestCase.java:128)
> at junit.framework.TestResult$1.protect(TestResult.java:110)
> at junit.framework.TestResult.runProtected(TestResult.java:128)
> at junit.framework.TestResult.run(TestResult.java:113)
> at junit.framework.TestCase.run(TestCase.java:120)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
> at
> org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
> at sbt.TestRunner.run(TestFramework.scala:53)
> at sbt.TestRunner.runTest$1(TestFramework.scala:67)
> at sbt.TestRunner.run(TestFramework.scala:76)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at sbt.NamedTestTask.run(TestFramework.scala:92)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
> at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
> at sbt.impl.RunTask.runTask(RunTask.scala:85)
> at
> sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at sbt.Control$.trapUnit(Control.scala:19)
> at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
> [0m[ [0minfo [0m] [0mTest Starting:
> testMultiProduceResend(kafka.integration.LazyInitProducerTest) [0m
> [0m[ [31merror [0m] [0mTest Failed:
> testMultiProduceResend(kafka.integration.LazyInitProducerTest) [0m
> java.net.BindException: Address already in use
> at sun.nio.ch.Net.bind(Native Method)
> at
> sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
> at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
> at
> kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
> at
> kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
> at
> kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
> at
> kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
> at junit.framework.TestCase.runBare(TestCase.java:128)
> at junit.framework.TestResult$1.protect(TestResult.java:110)
> at junit.framework.TestResult.runProtected(TestResult.java:128)
> at junit.framework.TestResult.run(TestResult.java:113)
> at junit.framework.TestCase.run(TestCase.java:120)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
> at
> org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
> at sbt.TestRunner.run(TestFramework.scala:53)
> at sbt.TestRunner.runTest$1(TestFramework.scala:67)
> at sbt.TestRunner.run(TestFramework.scala:76)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at sbt.NamedTestTask.run(TestFramework.scala:92)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
> at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
> at sbt.impl.RunTask.runTask(RunTask.scala:85)
> at
> sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at sbt.Control$.trapUnit(Control.scala:19)
> at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
> [0m[ [0minfo [0m] [34m== core-kafka /
> kafka.integration.LazyInitProducerTest == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==
> [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest) [0m
> [0m[ [31merror [0m] [0mTest Failed:
> testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest) [0m
> java.net.BindException: Address already in use
> at sun.nio.ch.Net.bind(Native Method)
> at
> sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
> at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
> at
> org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
> at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
> at
> kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
> at
> kafka.server.LeaderElectionTest.setUp(LeaderElectionTest.scala:39)
> at junit.framework.TestCase.runBare(TestCase.java:128)
> at junit.framework.TestResult$1.protect(TestResult.java:110)
> at junit.framework.TestResult.runProtected(TestResult.java:128)
> at junit.framework.TestResult.run(TestResult.java:113)
> at junit.framework.TestCase.run(TestCase.java:120)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at junit.framework.TestSuite.runTest(TestSuite.java:228)
> at junit.framework.TestSuite.run(TestSuite.java:223)
> at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
> at
> org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
> at sbt.TestRunner.run(TestFramework.scala:53)
> at sbt.TestRunner.runTest$1(TestFramework.scala:67)
> at sbt.TestRunner.run(TestFramework.scala:76)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at
> sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
> at sbt.NamedTestTask.run(TestFramework.scala:92)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at
> sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
> at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
> at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
> at sbt.impl.RunTask.runTask(RunTask.scala:85)
> at
> sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at
> sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
> at sbt.Control$.trapUnit(Control.scala:19)
> at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==
> [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.network.SocketServerTest ==
> [0m
> [0m[ [0minfo [0m] [0mTest Starting: simpleRequest [0m
> [0m[ [0minfo [0m] [0mTest Passed: simpleRequest [0m
> [0m[ [0minfo [0m] [0mTest Starting: tooBigRequestIsRejected [0m
> [0m[ [0minfo [0m] [0mTest Passed: tooBigRequestIsRejected [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.network.SocketServerTest ==
> [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m
> [0m[ [31merror [0m] [0mFailed: : Total 140, Failed 61, Errors 0, Passed
> 79, Skipped 0 [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m
> [0m[ [0minfo [0m] [0mDeleting directory /tmp/sbt_c45e98f8 [0m
> [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m
> [0m[ [31merror [0m] [0mError running kafka.zk.ZKEphemeralTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.server.ReplicaFetchTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.server.LeaderElectionTest:
> Test FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.javaapi.consumer.ZookeeperConsumerConnectorTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.server.ServerShutdownTest:
> Test FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.producer.SyncProducerTest:
> Test FAILED [0m
> [0m[ [31merror [0m] [0mError running test: One or more subtasks failed
> [0m
> [0m[ [31merror [0m] [0mError running kafka.server.LogRecoveryTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.integration.LazyInitProducerTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.integration.AutoOffsetResetTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.admin.AdminTest: Test FAILED
> [0m
> [0m[ [31merror [0m] [0mError running kafka.log.LogOffsetTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.log.LogCorruptionTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.integration.TopicMetadataTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.log.LogManagerTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.consumer.ZookeeperConsumerConnectorTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.integration.BackwardsCompatibilityTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running
> kafka.controller.ControllerBasicTest: Test FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.integration.PrimitiveApiTest:
> Test FAILED [0m
> [0m[ [0minfo [0m] [0m [0m
> [0m[ [0minfo [0m] [0mTotal time: 109 s, completed Aug 17, 2012 10:08:41
> PM [0m
> [0m[ [0minfo [0m] [0m [0m
> [0m[ [0minfo [0m] [0mTotal session time: 109 s, completed Aug 17, 2012
> 10:08:41 PM [0m
> [0m[ [31merror [0m] [0mError during build. [0m
> Build step 'Execute shell' marked build as failure
>
Build failed in Jenkins: Kafka-0.8 #23
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/23/changes>
Changes:
[junrao] enforce broker.id to be a non-negative integer; patched by Swapnil Ghike; reviewed by Jun Rao, Neha Narkhede; KAFKA-424
------------------------------------------
[...truncated 3791 lines...]
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[31merror[0m] [0mTest Failed: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
java.net.BindException: Address already in use
at sun.nio.ch.Net.bind(Native Method)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
at kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
at kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
at kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
at kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
at junit.framework.TestCase.runBare(TestCase.java:128)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [0mTest Starting: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[31merror[0m] [0mTest Failed: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
java.net.BindException: Address already in use
at sun.nio.ch.Net.bind(Native Method)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
at kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
at kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
at kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
at kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
at junit.framework.TestCase.runBare(TestCase.java:128)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[31merror[0m] [0mTest Failed: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
java.net.BindException: Address already in use
at sun.nio.ch.Net.bind(Native Method)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
at kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
at kafka.integration.LazyInitProducerTest.kafka$integration$KafkaServerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.KafkaServerTestHarness$class.setUp(KafkaServerTestHarness.scala:35)
at kafka.integration.LazyInitProducerTest.kafka$integration$ProducerConsumerTestHarness$$super$setUp(LazyInitProducerTest.scala:33)
at kafka.integration.ProducerConsumerTestHarness$class.setUp(ProducerConsumerTestHarness.scala:34)
at kafka.integration.LazyInitProducerTest.setUp(LazyInitProducerTest.scala:42)
at junit.framework.TestCase.runBare(TestCase.java:128)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[31merror[0m] [0mTest Failed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
java.net.BindException: Address already in use
at sun.nio.ch.Net.bind(Native Method)
at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:126)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:59)
at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:52)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:144)
at org.apache.zookeeper.server.NIOServerCnxn$Factory.<init>(NIOServerCnxn.java:125)
at kafka.zk.EmbeddedZookeeper.<init>(EmbeddedZookeeper.scala:32)
at kafka.zk.ZooKeeperTestHarness$class.setUp(ZooKeeperTestHarness.scala:32)
at kafka.server.LeaderElectionTest.setUp(LeaderElectionTest.scala:39)
at junit.framework.TestCase.runBare(TestCase.java:128)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 140, Failed 61, Errors 0, Passed 79, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_c45e98f8[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.zk.ZKEphemeralTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.server.ReplicaFetchTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.server.LeaderElectionTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.javaapi.consumer.ZookeeperConsumerConnectorTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.server.ServerShutdownTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.producer.SyncProducerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[31merror[0m] [0mError running kafka.server.LogRecoveryTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.integration.LazyInitProducerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.integration.AutoOffsetResetTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.admin.AdminTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.log.LogOffsetTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.log.LogCorruptionTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.integration.TopicMetadataTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.log.LogManagerTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.consumer.ZookeeperConsumerConnectorTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.integration.BackwardsCompatibilityTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.controller.ControllerBasicTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.integration.PrimitiveApiTest: Test FAILED[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 109 s, completed Aug 17, 2012 10:08:41 PM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 109 s, completed Aug 17, 2012 10:08:41 PM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure