You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kafka.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/09/12 04:14:18 UTC
Build failed in Jenkins: Kafka-0.8 #40
See <https://builds.apache.org/job/Kafka-0.8/40/changes>
Changes:
[junrao] Check max message size on server; patched by Swapnil Ghike; reviewed by Joel Koshy and Jun Rao; KAFKA-490
------------------------------------------
[...truncated 446 lines...]
[0m[[0minfo[0m] [0mTest Passed: testISRExpirationForSlowFollowers(kafka.server.ISRExpirationTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ISRExpirationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Starting: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Passed: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeCheck[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeCheck[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Passed: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testIterator[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogCorruptionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeTooLarge(kafka.log.LogCorruptionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeTooLarge(kafka.log.LogCorruptionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogCorruptionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.admin.AdminTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testManualReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testManualReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicCreationInZK(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicCreationInZK(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetTopicMetadata(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetTopicMetadata(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.admin.AdminTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ServerShutdownTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanShutdown(kafka.server.ServerShutdownTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanShutdown(kafka.server.ServerShutdownTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ServerShutdownTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.RpcDataSerializationTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.RpcDataSerializationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduce(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFetchRequestEnforcesUniqueTopicsForOffsetDetails(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetchRequestEnforcesUniqueTopicsForOffsetDetails(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testEqualsWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testEqualsWithCompression[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Passed: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Starting: testBlacklists[0m
[0m[[0minfo[0m] [0mTest Passed: testBlacklists[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Passed: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Starting: testChecksum[0m
[0m[[0minfo[0m] [0mTest Passed: testChecksum[0m
[0m[[0minfo[0m] [0mTest Starting: testEquality[0m
[0m[[0minfo[0m] [0mTest Passed: testEquality[0m
[0m[[0minfo[0m] [0mTest Starting: testIsHashable[0m
[0m[[0minfo[0m] [0mTest Passed: testIsHashable[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
Offsets = 240,216,108,0
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /var/tmp/sbt_4dfd6656[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[error] Failed: : Total 138, Failed 1, Errors 0, Passed 137, Skipped 0
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[error] Error running kafka.message.CompressionUtilTest: Test FAILED
[error] Error running test: One or more subtasks failed
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 223 s, completed Sep 12, 2012 2:14:39 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 224 s, completed Sep 12, 2012 2:14:39 AM[0m
[error] Error during build.
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #44
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/44/changes>
Changes:
[nehanarkhede] KAFKA-449: Leader election test
------------------------------------------
[...truncated 586 lines...]
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.CompressionUtilTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSimpleCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Passed: testSimpleCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Starting: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Passed: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Starting: testSnappyCompressDecompressExplicit[0m
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.xerial.snappy.SnappyLoader.loadNativeLibrary(SnappyLoader.java:317)
at org.xerial.snappy.SnappyLoader.load(SnappyLoader.java:219)
at org.xerial.snappy.Snappy.<clinit>(Snappy.java:44)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:79)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:66)
at kafka.message.SnappyCompression.<init>(CompressionUtils.scala:61)
at kafka.message.CompressionFactory$.apply(CompressionUtils.scala:82)
at kafka.message.CompressionUtils$.compress(CompressionUtils.scala:109)
at kafka.message.CompressionUtilTest.testSnappyCompressDecompressExplicit(CompressionUtilsTest.scala:65)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.junit.internal.runners.TestMethodRunner.executeMethodBody(TestMethodRunner.java:99)
at org.junit.internal.runners.TestMethodRunner.runUnprotected(TestMethodRunner.java:81)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestMethodRunner.runMethod(TestMethodRunner.java:75)
at org.junit.internal.runners.TestMethodRunner.run(TestMethodRunner.java:45)
at org.junit.internal.runners.TestClassMethodsRunner.invokeTestMethod(TestClassMethodsRunner.java:71)
at org.junit.internal.runners.TestClassMethodsRunner.run(TestClassMethodsRunner.java:35)
at org.junit.internal.runners.TestClassRunner$1.runUnprotected(TestClassRunner.java:42)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestClassRunner.run(TestClassRunner.java:52)
at org.junit.internal.runners.CompositeRunner.run(CompositeRunner.java:29)
at org.junit.runner.JUnitCore.run(JUnitCore.java:121)
at org.junit.runner.JUnitCore.run(JUnitCore.java:100)
at org.junit.runner.JUnitCore.run(JUnitCore.java:91)
at org.scalatest.junit.JUnitSuite$class.run(JUnitSuite.scala:261)
at kafka.message.CompressionUtilTest.run(CompressionUtilsTest.scala:25)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.lang.UnsatisfiedLinkError: no snappyjava in java.library.path
at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1734)
at java.lang.Runtime.loadLibrary0(Runtime.java:823)
at java.lang.System.loadLibrary(System.java:1028)
at org.xerial.snappy.SnappyNativeLoader.loadLibrary(SnappyNativeLoader.java:52)
... 53 more
[error] Test Failed: testSnappyCompressDecompressExplicit
org.xerial.snappy.SnappyError: [FAILED_TO_LOAD_NATIVE_LIBRARY] null
at org.xerial.snappy.SnappyLoader.load(SnappyLoader.java:229)
at org.xerial.snappy.Snappy.<clinit>(Snappy.java:44)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:79)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:66)
at kafka.message.SnappyCompression.<init>(CompressionUtils.scala:61)
at kafka.message.CompressionFactory$.apply(CompressionUtils.scala:82)
at kafka.message.CompressionUtils$.compress(CompressionUtils.scala:109)
at kafka.message.CompressionUtilTest.testSnappyCompressDecompressExplicit(CompressionUtilsTest.scala:65)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.junit.internal.runners.TestMethodRunner.executeMethodBody(TestMethodRunner.java:99)
at org.junit.internal.runners.TestMethodRunner.runUnprotected(TestMethodRunner.java:81)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestMethodRunner.runMethod(TestMethodRunner.java:75)
at org.junit.internal.runners.TestMethodRunner.run(TestMethodRunner.java:45)
at org.junit.internal.runners.TestClassMethodsRunner.invokeTestMethod(TestClassMethodsRunner.java:71)
at org.junit.internal.runners.TestClassMethodsRunner.run(TestClassMethodsRunner.java:35)
at org.junit.internal.runners.TestClassRunner$1.runUnprotected(TestClassRunner.java:42)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestClassRunner.run(TestClassRunner.java:52)
at org.junit.internal.runners.CompositeRunner.run(CompositeRunner.java:29)
at org.junit.runner.JUnitCore.run(JUnitCore.java:121)
at org.junit.runner.JUnitCore.run(JUnitCore.java:100)
at org.junit.runner.JUnitCore.run(JUnitCore.java:91)
at org.scalatest.junit.JUnitSuite$class.run(JUnitSuite.scala:261)
at kafka.message.CompressionUtilTest.run(CompressionUtilsTest.scala:25)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.CompressionUtilTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEphemeralNodeCleanup(kafka.zk.ZKEphemeralTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.zk.ZKEphemeralTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Passed: testSmallFetchSize[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testIterator[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.admin.AdminTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testManualReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testManualReplicaAssignment(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicCreationInZK(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicCreationInZK(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetTopicMetadata(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetTopicMetadata(kafka.admin.AdminTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.admin.AdminTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Passed: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Starting: testChecksum[0m
[0m[[0minfo[0m] [0mTest Passed: testChecksum[0m
[0m[[0minfo[0m] [0mTest Starting: testEquality[0m
[0m[[0minfo[0m] [0mTest Passed: testEquality[0m
[0m[[0minfo[0m] [0mTest Starting: testIsHashable[0m
[0m[[0minfo[0m] [0mTest Passed: testIsHashable[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ISRExpirationTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testISRExpirationForStuckFollowers(kafka.server.ISRExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testISRExpirationForStuckFollowers(kafka.server.ISRExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testISRExpirationForSlowFollowers(kafka.server.ISRExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testISRExpirationForSlowFollowers(kafka.server.ISRExpirationTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ISRExpirationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeTooLarge(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReachableServer(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyProduceRequest(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProducerCanTimeout(kafka.producer.SyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.SyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /var/tmp/sbt_11a20027[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 137, Failed 1, Errors 0, Passed 136, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.message.CompressionUtilTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 221 s, completed Sep 14, 2012 6:15:11 PM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 223 s, completed Sep 14, 2012 6:15:11 PM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #43
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/43/changes>
Changes:
[junrao] revisit broker config in 0.8; patched by Swapnil Ghike; reviewed by Jun Rao; KAFKA-325
------------------------------------------
[...truncated 5 lines...]
A core/src/test/scala/unit/kafka/utils/TopicTest.scala
U core/src/test/scala/unit/kafka/utils/TestUtils.scala
U core/src/test/scala/unit/kafka/log/LogManagerTest.scala
U core/src/test/scala/unit/kafka/integration/TopicMetadataTest.scala
U core/src/test/scala/unit/kafka/server/SimpleFetchTest.scala
U core/src/test/scala/unit/kafka/network/SocketServerTest.scala
U core/src/main/scala/kafka/cluster/Replica.scala
U core/src/main/scala/kafka/cluster/Partition.scala
D core/src/main/scala/kafka/log/LogStats.scala
U core/src/main/scala/kafka/log/Log.scala
U core/src/main/scala/kafka/log/LogManager.scala
U core/src/main/scala/kafka/producer/SyncProducer.scala
U core/src/main/scala/kafka/producer/Producer.scala
D core/src/main/scala/kafka/producer/async/AsyncProducerStatsMBean.scala
U core/src/main/scala/kafka/producer/async/AsyncProducerStats.scala
U core/src/main/scala/kafka/producer/async/DefaultEventHandler.scala
U core/src/main/scala/kafka/producer/async/ProducerSendThread.scala
U core/src/main/scala/kafka/message/FileMessageSet.scala
U core/src/main/scala/kafka/admin/CreateTopicCommand.scala
D core/src/main/scala/kafka/network/SocketServerStats.scala
U core/src/main/scala/kafka/network/SocketServer.scala
U core/src/main/scala/kafka/network/RequestChannel.scala
U core/src/main/scala/kafka/consumer/ConsumerIterator.scala
U core/src/main/scala/kafka/consumer/SimpleConsumer.scala
U core/src/main/scala/kafka/consumer/ConsumerConnector.scala
U core/src/main/scala/kafka/consumer/PartitionTopicInfo.scala
U core/src/main/scala/kafka/consumer/ConsumerTopicStat.scala
U core/src/main/scala/kafka/consumer/ZookeeperConsumerConnector.scala
U core/src/main/scala/kafka/utils/Utils.scala
U core/src/main/scala/kafka/utils/Pool.scala
A core/src/main/scala/kafka/utils/Topic.scala
U core/src/main/scala/kafka/metrics/KafkaMetricsGroup.scala
U core/src/main/scala/kafka/server/KafkaConfig.scala
U core/src/main/scala/kafka/server/KafkaServer.scala
U core/src/main/scala/kafka/server/RequestPurgatory.scala
U core/src/main/scala/kafka/server/ReplicaManager.scala
U core/src/main/scala/kafka/server/KafkaController.scala
U core/src/main/scala/kafka/server/KafkaRequestHandler.scala
U core/src/main/scala/kafka/server/KafkaApis.scala
U core/src/main/scala/kafka/server/AbstractFetcherThread.scala
U core/src/main/scala/kafka/api/StopReplicaRequest.scala
U core/src/main/scala/kafka/api/OffsetRequest.scala
U core/src/main/scala/kafka/api/FetchRequest.scala
U core/src/main/scala/kafka/api/RequestKeys.scala
U core/src/main/scala/kafka/api/LeaderAndIsrRequest.scala
U core/src/main/scala/kafka/api/TopicMetadataRequest.scala
U core/src/main/scala/kafka/api/ProducerRequest.scala
U core/src/main/scala/kafka/javaapi/ProducerRequest.scala
At revision 1384629
[Kafka-0.8] $ /bin/bash -xe /var/tmp/hudson3795267082552113480.sh
+ ./sbt update
[0m[[0minfo[0m] [0mBuilding project Kafka 0.8.0 against Scala 2.8.0[0m
[0m[[0minfo[0m] [0m using KafkaProject with sbt 0.7.5 and Scala 2.7.7[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#core-kafka_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 12 already retrieved (0kB/87ms)[0m
[0m[[0minfo[0m] [34m== core-kafka / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#hadoop-producer_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 40 already retrieved (0kB/66ms)[0m
[0m[[0minfo[0m] [34m== hadoop producer / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#hadoop-consumer_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 37 already retrieved (0kB/63ms)[0m
[0m[[0minfo[0m] [34m== hadoop consumer / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== contrib / update ==[0m
[0m[[33mwarn[0m] [0mNo dependency configuration found, using defaults.[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#contrib_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [default][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 0 already retrieved (0kB/2ms)[0m
[0m[[0minfo[0m] [34m== contrib / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#perf_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 5 already retrieved (0kB/41ms)[0m
[0m[[0minfo[0m] [34m== perf / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#java-examples_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 5 already retrieved (0kB/27ms)[0m
[0m[[0minfo[0m] [34m== java-examples / update ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== Kafka / update ==[0m
[0m[[0minfo[0m] [0m:: retrieving :: kafka#kafka_2.8.0 [sync][0m
[0m[[0minfo[0m] [0m confs: [compile, runtime, test, provided, system, optional, sources, javadoc][0m
[0m[[0minfo[0m] [0m 0 artifacts copied, 8 already retrieved (0kB/223ms)[0m
[0m[[0minfo[0m] [34m== Kafka / update ==[0m
[0m[[32msuccess[0m] [0mSuccessful.[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 27 s, completed Sep 14, 2012 2:12:20 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 32 s, completed Sep 14, 2012 2:12:20 AM[0m
[0m[[32msuccess[0m] [0mBuild completed successfully.[0m
[Kafka-0.8] $ /bin/bash -xe /var/tmp/hudson3758644180412537828.sh
+ ./sbt test
[0m[[0minfo[0m] [0mBuilding project Kafka 0.8.0 against Scala 2.8.0[0m
[0m[[0minfo[0m] [0m using KafkaProject with sbt 0.7.5 and Scala 2.7.7[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 39 new/modified, 83 indirectly invalidated, 3 removed.[0m
[0m[[0minfo[0m] [0mCompiling main sources...[0m
[0m[[33mwarn[0m] [0mthere were unchecked warnings; re-run with -unchecked for details[0m
[0m[[33mwarn[0m] [0mone warning found[0m
java.io.IOException: Cannot run program "javac": error=12, Not enough space
at java.lang.ProcessBuilder.start(ProcessBuilder.java:460)
at sbt.SimpleProcessBuilder.run(ProcessImpl.scala:381)
at sbt.AbstractProcessBuilder.run(ProcessImpl.scala:132)
at sbt.AbstractProcessBuilder$$anonfun$runBuffered$1.apply(ProcessImpl.scala:165)
at sbt.AbstractProcessBuilder$$anonfun$runBuffered$1.apply(ProcessImpl.scala:165)
at sbt.BufferedLogger.bufferAll(Logger.scala:179)
at sbt.AbstractProcessBuilder.runBuffered(ProcessImpl.scala:165)
at sbt.AbstractProcessBuilder.$bang(ProcessImpl.scala:160)
at sbt.Compile.externalJavac$1(Compile.scala:94)
at sbt.Compile$$anonfun$forkJavac$2.apply(Compile.scala:95)
at sbt.Compile$$anonfun$forkJavac$2.apply(Compile.scala:95)
at sbt.WithArgumentFile$$anonfun$withArgumentFile$1.apply(Compile.scala:117)
at sbt.WithArgumentFile$$anonfun$withArgumentFile$1.apply(Compile.scala:114)
at xsbt.FileUtilities$.withTemporaryDirectory(FileUtilities.scala:169)
at sbt.WithArgumentFile$class.withArgumentFile(Compile.scala:114)
at sbt.Compile.withArgumentFile(Compile.scala:71)
at sbt.Compile.forkJavac(Compile.scala:95)
at sbt.Compile.processJava(Compile.scala:86)
at sbt.CompilerCore$$anonfun$2.apply(Compile.scala:28)
at sbt.CompilerCore$$anonfun$2.apply(Compile.scala:28)
at sbt.CompilerCore$$anonfun$process$1$1.apply(Compile.scala:22)
at sbt.CompilerCore$$anonfun$process$1$1.apply(Compile.scala:22)
at sbt.CompilerCore$$anonfun$doCompile$3.apply(Compile.scala:46)
at sbt.CompilerCore$$anonfun$doCompile$3.apply(Compile.scala:42)
at scala.Option.orElse(Option.scala:102)
at sbt.CompilerCore.doCompile(Compile.scala:41)
at sbt.CompilerCore.apply(Compile.scala:29)
at sbt.AbstractCompileConditional.run$1(Conditional.scala:341)
at sbt.AbstractCompileConditional$$anonfun$3.apply(Conditional.scala:344)
at sbt.AbstractCompileConditional$$anonfun$3.apply(Conditional.scala:344)
at sbt.classfile.Analyze$.apply(Analyze.scala:110)
at sbt.AbstractCompileConditional.execute(Conditional.scala:344)
at sbt.Conditional$class.run(Conditional.scala:43)
at sbt.AbstractCompileConditional.run(Conditional.scala:231)
at sbt.BasicScalaProject.sbt$BasicScalaProject$$doCompile(DefaultProject.scala:260)
at sbt.BasicScalaProject$$anonfun$compileAction$1.apply(DefaultProject.scala:274)
at sbt.BasicScalaProject$$anonfun$compileAction$1.apply(DefaultProject.scala:274)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.io.IOException: error=12, Not enough space
at java.lang.UNIXProcess.forkAndExec(Native Method)
at java.lang.UNIXProcess.<init>(UNIXProcess.java:53)
at java.lang.ProcessImpl.start(ProcessImpl.java:65)
at java.lang.ProcessBuilder.start(ProcessBuilder.java:453)
at sbt.SimpleProcessBuilder.run(ProcessImpl.scala:381)
at sbt.AbstractProcessBuilder.run(ProcessImpl.scala:132)
at sbt.AbstractProcessBuilder$$anonfun$runBuffered$1.apply(ProcessImpl.scala:165)
at sbt.AbstractProcessBuilder$$anonfun$runBuffered$1.apply(ProcessImpl.scala:165)
at sbt.BufferedLogger.bufferAll(Logger.scala:179)
at sbt.AbstractProcessBuilder.runBuffered(ProcessImpl.scala:165)
at sbt.AbstractProcessBuilder.$bang(ProcessImpl.scala:160)
at sbt.Compile.externalJavac$1(Compile.scala:94)
at sbt.Compile$$anonfun$forkJavac$2.apply(Compile.scala:95)
at sbt.Compile$$anonfun$forkJavac$2.apply(Compile.scala:95)
at sbt.WithArgumentFile$$anonfun$withArgumentFile$1.apply(Compile.scala:117)
at sbt.WithArgumentFile$$anonfun$withArgumentFile$1.apply(Compile.scala:114)
at xsbt.FileUtilities$.withTemporaryDirectory(FileUtilities.scala:169)
at sbt.WithArgumentFile$class.withArgumentFile(Compile.scala:114)
at sbt.Compile.withArgumentFile(Compile.scala:71)
at sbt.Compile.forkJavac(Compile.scala:95)
at sbt.Compile.processJava(Compile.scala:86)
at sbt.CompilerCore$$anonfun$2.apply(Compile.scala:28)
at sbt.CompilerCore$$anonfun$2.apply(Compile.scala:28)
at sbt.CompilerCore$$anonfun$process$1$1.apply(Compile.scala:22)
at sbt.CompilerCore$$anonfun$process$1$1.apply(Compile.scala:22)
at sbt.CompilerCore$$anonfun$doCompile$3.apply(Compile.scala:46)
at sbt.CompilerCore$$anonfun$doCompile$3.apply(Compile.scala:42)
at scala.Option.orElse(Option.scala:102)
at sbt.CompilerCore.doCompile(Compile.scala:41)
at sbt.CompilerCore.apply(Compile.scala:29)
at sbt.AbstractCompileConditional.run$1(Conditional.scala:341)
at sbt.AbstractCompileConditional$$anonfun$3.apply(Conditional.scala:344)
at sbt.AbstractCompileConditional$$anonfun$3.apply(Conditional.scala:344)
at sbt.classfile.Analyze$.apply(Analyze.scala:110)
at sbt.AbstractCompileConditional.execute(Conditional.scala:344)
at sbt.Conditional$class.run(Conditional.scala:43)
at sbt.AbstractCompileConditional.run(Conditional.scala:231)
at sbt.BasicScalaProject.sbt$BasicScalaProject$$doCompile(DefaultProject.scala:260)
at sbt.BasicScalaProject$$anonfun$compileAction$1.apply(DefaultProject.scala:274)
at sbt.BasicScalaProject$$anonfun$compileAction$1.apply(DefaultProject.scala:274)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [34m== core-kafka / compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== core-kafka / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / copy-resources ==[0m
[0m[[0minfo[0m] [34m== core-kafka / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[31merror[0m] [0mError running compile: java.io.IOException: Cannot run program "javac": error=12, Not enough space[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 106 s, completed Sep 14, 2012 2:14:17 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 109 s, completed Sep 14, 2012 2:14:17 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #42
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/42/changes>
Changes:
[junrao] Improve Kafka internal metrics; patched by Jun Rao; reviewed by Joel Koshy and Neha Narkhede; KAFKA-203
------------------------------------------
[...truncated 834 lines...]
at kafka.server.ReplicaManager.becomeLeaderOrFollower(ReplicaManager.scala:144)
at kafka.server.KafkaApis.handleLeaderAndISRRequest(KafkaApis.scala:73)
at kafka.server.KafkaApis.handle(KafkaApis.scala:60)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:40)
at java.lang.Thread.run(Thread.java:662)
[0m[[0minfo[0m] [0mTest Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Passed: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Starting: testBlacklists[0m
[0m[[0minfo[0m] [0mTest Passed: testBlacklists[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testKafkaTimer(unit.kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / unit.kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresSingleLogSegment(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointNoFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHWCheckpointWithFailuresMultipleLogSegments(kafka.server.LogRecoveryTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LogRecoveryTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompression(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressionSetConsumption(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerDecoder(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderSelectionForPartition(kafka.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadInvalidLogsFails[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Starting: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Passed: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeCheck[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeCheck[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogCorruptionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeTooLarge(kafka.log.LogCorruptionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeTooLarge(kafka.log.LogCorruptionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogCorruptionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.FetcherTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFetcher(kafka.consumer.FetcherTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetcher(kafka.consumer.FetcherTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.FetcherTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConfigurablePartitions(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConfigurablePartitions(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProducerQueueSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProducerQueueSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.SegmentListTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testAppend[0m
[0m[[0minfo[0m] [0mTest Passed: testAppend[0m
[0m[[0minfo[0m] [0mTest Starting: testTrunc[0m
[0m[[0minfo[0m] [0mTest Passed: testTrunc[0m
[0m[[0minfo[0m] [0mTest Starting: testTruncLast[0m
[0m[[0minfo[0m] [0mTest Passed: testTruncLast[0m
[0m[[0minfo[0m] [0mTest Starting: testTruncBeyondList[0m
[0m[[0minfo[0m] [0mTest Passed: testTruncBeyondList[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.SegmentListTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== hadoop consumer / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== hadoop producer / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 138, Failed 1, Errors 0, Passed 137, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_281659f6[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.integration.PrimitiveApiTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 211 s, completed Sep 13, 2012 5:13:38 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 211 s, completed Sep 13, 2012 5:13:38 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #41
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/41/changes>
Changes:
[junrao] Handle topic names with / on Kafka server (0.8 branch); patched by Swapnil Ghike; reviewed by Jay Kreps, Joel Koshy and Jun Rao; KAFKA-495
------------------------------------------
[...truncated 1084 lines...]
at sun.nio.ch.Net.connect(Net.java:356)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:623)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:148)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:169)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:109)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.BrokerPartitionInfo.getBrokerPartitionInfo(BrokerPartitionInfo.scala:48)
at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$getPartitionListForTopic(DefaultEventHandler.scala:139)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:100)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:99)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
at kafka.producer.async.DefaultEventHandler.partitionAndCollate(DefaultEventHandler.scala:99)
at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:64)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:47)
at kafka.producer.Producer.send(Producer.scala:65)
at kafka.producer.ProducerTest.testUpdateBrokerPartitionInfo(ProducerTest.scala:116)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-09-11 22:04:03,574] ERROR Connection attempt to localhost:80 failed, next attempt in 100 ms (kafka.producer.SyncProducer:102)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:364)
at sun.nio.ch.Net.connect(Net.java:356)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:623)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:148)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:169)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:109)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.BrokerPartitionInfo.getBrokerPartitionInfo(BrokerPartitionInfo.scala:48)
at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$getPartitionListForTopic(DefaultEventHandler.scala:139)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:100)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:99)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
at kafka.producer.async.DefaultEventHandler.partitionAndCollate(DefaultEventHandler.scala:99)
at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:64)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:47)
at kafka.producer.Producer.send(Producer.scala:65)
at kafka.producer.ProducerTest.testUpdateBrokerPartitionInfo(ProducerTest.scala:116)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-09-11 22:04:03,677] ERROR Connection attempt to localhost:80 failed, next attempt in 1000 ms (kafka.producer.SyncProducer:102)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:364)
at sun.nio.ch.Net.connect(Net.java:356)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:623)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:148)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:169)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:109)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.BrokerPartitionInfo.getBrokerPartitionInfo(BrokerPartitionInfo.scala:48)
at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$getPartitionListForTopic(DefaultEventHandler.scala:139)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:100)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:99)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
at kafka.producer.async.DefaultEventHandler.partitionAndCollate(DefaultEventHandler.scala:99)
at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:64)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:47)
at kafka.producer.Producer.send(Producer.scala:65)
at kafka.producer.ProducerTest.testUpdateBrokerPartitionInfo(ProducerTest.scala:116)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-09-11 22:04:04,684] ERROR Producer connection to localhost:80 timing out after 5000 ms (kafka.producer.SyncProducer:102)
java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect0(Native Method)
at sun.nio.ch.Net.connect(Net.java:364)
at sun.nio.ch.Net.connect(Net.java:356)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:623)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:148)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:169)
at kafka.producer.SyncProducer.doSend(SyncProducer.scala:74)
at kafka.producer.SyncProducer.send(SyncProducer.scala:109)
at kafka.producer.BrokerPartitionInfo.updateInfo(BrokerPartitionInfo.scala:82)
at kafka.producer.BrokerPartitionInfo.getBrokerPartitionInfo(BrokerPartitionInfo.scala:48)
at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$getPartitionListForTopic(DefaultEventHandler.scala:139)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:100)
at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:99)
at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
at kafka.producer.async.DefaultEventHandler.partitionAndCollate(DefaultEventHandler.scala:99)
at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:64)
at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:47)
at kafka.producer.Producer.send(Producer.scala:65)
at kafka.producer.ProducerTest.testUpdateBrokerPartitionInfo(ProducerTest.scala:116)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
at java.lang.reflect.Method.invoke(Method.java:601)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-09-11 22:04:04,906] FATAL [Kafka Log on Broker 0], Halting due to unrecoverable I/O error while handling producer request (kafka.log.Log:115)
java.nio.channels.ClosedByInterruptException
at java.nio.channels.spi.AbstractInterruptibleChannel.end(AbstractInterruptibleChannel.java:202)
at sun.nio.ch.FileChannelImpl.force(FileChannelImpl.java:367)
at kafka.message.FileMessageSet.flush(FileMessageSet.scala:161)
at kafka.log.Log.flush(Log.scala:372)
at kafka.log.Log.maybeFlush(Log.scala:359)
at kafka.log.Log.liftedTree1$1(Log.scala:266)
at kafka.log.Log.append(Log.scala:261)
at kafka.server.ReplicaFetcherThread.processPartitionData(ReplicaFetcherThread.scala:40)
at kafka.server.AbstractFetcherThread$$anonfun$doWork$5$$anonfun$apply$1.apply(AbstractFetcherThread.scala:95)
at kafka.server.AbstractFetcherThread$$anonfun$doWork$5$$anonfun$apply$1.apply(AbstractFetcherThread.scala:87)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
at scala.collection.mutable.ArrayOps.foreach(ArrayOps.scala:34)
at kafka.server.AbstractFetcherThread$$anonfun$doWork$5.apply(AbstractFetcherThread.scala:87)
at kafka.server.AbstractFetcherThread$$anonfun$doWork$5.apply(AbstractFetcherThread.scala:86)
at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
at scala.collection.mutable.ArrayOps.foreach(ArrayOps.scala:34)
at kafka.server.AbstractFetcherThread.doWork(AbstractFetcherThread.scala:86)
at kafka.utils.ShutdownableThread.run(ShutdownableThread.scala:50)
Build step 'Execute shell' marked build as failure