Posted to commits@kafka.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/11/17 03:12:12 UTC

Build failed in Jenkins: Kafka-0.8 #111

See <https://builds.apache.org/job/Kafka-0.8/111/changes>

Changes:

[jjkoshy] Fix deadlock between leader-finder-thread and consumer-fetcher-thread during broker failure; patched by Joel Koshy; reviewed by Jun Rao; KAFKA-618

[junrao] MigrationTool should disable shallow iteration in the 0.7 consumer; patched by Yang Ye; reviewed by Jun Rao; KAFKA-613

------------------------------------------
[...truncated 3115 lines...]
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:120)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.net.ConnectException: Connection refused
	at sun.nio.ch.Net.connect(Native Method)
	at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:500)
	at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
	at kafka.producer.SyncProducer.connect(SyncProducer.scala:135)
	at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:150)
	at kafka.producer.SyncProducer.kafka$producer$SyncProducer$$doSend(SyncProducer.scala:71)
	at kafka.producer.SyncProducer.send(SyncProducer.scala:101)
	at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:25)
	... 42 more
[info] Test Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)
[info] Test Starting: testJavaProducer(kafka.producer.AsyncProducerTest)
[info] Test Passed: testJavaProducer(kafka.producer.AsyncProducerTest)
[info] Test Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] Test Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] == core-kafka / kafka.producer.AsyncProducerTest ==
[info] 
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] Test Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[2012-11-17 02:11:49,815] ERROR Closing socket for /127.0.0.1 because of error (kafka.network.Processor:102)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
	at sun.nio.ch.IOUtil.read(IOUtil.java:171)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
	at kafka.utils.Utils$.read(Utils.scala:393)
	at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
	at kafka.network.Processor.read(SocketServer.scala:293)
	at kafka.network.Processor.run(SocketServer.scala:209)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-17 02:11:49,815] ERROR Closing socket for /127.0.0.1 because of error (kafka.network.Processor:102)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.write0(Native Method)
	at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
	at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:69)
	at sun.nio.ch.IOUtil.write(IOUtil.java:40)
	at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
	at kafka.api.PartitionDataSend.writeTo(FetchResponse.scala:76)
	at kafka.network.MultiSend.writeTo(Transmission.scala:94)
	at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
	at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
	at kafka.api.TopicDataSend.writeTo(FetchResponse.scala:133)
	at kafka.network.MultiSend.writeTo(Transmission.scala:94)
	at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
	at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
	at kafka.api.FetchResponseSend.writeTo(FetchResponse.scala:232)
	at kafka.network.Processor.write(SocketServer.scala:318)
	at kafka.network.Processor.run(SocketServer.scala:211)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-17 02:11:50,118] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x13b0c22d6da0009, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[info] Test Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] 
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] Test Starting: simpleRequest
[info] Test Passed: simpleRequest
[info] Test Starting: tooBigRequestIsRejected
[info] Test Passed: tooBigRequestIsRejected
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] 
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] Test Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[info] Test Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] 
[info] == core-kafka / kafka.log.LogOffsetTest ==
[info] Test Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)
[info] Test Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)
[info] Test Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)
[info] == core-kafka / kafka.log.LogOffsetTest ==
[info] 
[info] == core-kafka / kafka.log.FileMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testWriteTo
[info] Test Passed: testWriteTo
[info] Test Starting: testFileSize
[info] Test Passed: testFileSize
[info] Test Starting: testIterationOverPartialAndTruncation
[info] Test Passed: testIterationOverPartialAndTruncation
[info] Test Starting: testIterationDoesntChangePosition
[info] Test Passed: testIterationDoesntChangePosition
[info] Test Starting: testRead
[info] Test Passed: testRead
[info] Test Starting: testSearch
[info] Test Passed: testSearch
[info] == core-kafka / kafka.log.FileMessageSetTest ==
[info] 
[info] == core-kafka / kafka.consumer.ConsumerIteratorTest ==
[info] Test Starting: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)
[info] Test Passed: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)
[info] == core-kafka / kafka.consumer.ConsumerIteratorTest ==
[info] 
[info] == core-kafka / kafka.integration.PrimitiveApiTest ==
[info] Test Starting: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)
[error] Test Failed: testMultiProduce(kafka.integration.PrimitiveApiTest)
junit.framework.AssertionFailedError: expected:<List(a_test4, b_test4)> but was:<List(a_test4, b_test4, a_test4, b_test4)>
	at junit.framework.Assert.fail(Assert.java:47)
	at junit.framework.Assert.failNotEquals(Assert.java:277)
	at junit.framework.Assert.assertEquals(Assert.java:64)
	at junit.framework.Assert.assertEquals(Assert.java:71)
	at kafka.integration.PrimitiveApiTest$$anonfun$testMultiProduce$2.apply(PrimitiveApiTest.scala:281)
	at kafka.integration.PrimitiveApiTest$$anonfun$testMultiProduce$2.apply(PrimitiveApiTest.scala:279)
	at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
	at scala.collection.immutable.List.foreach(List.scala:45)
	at kafka.integration.PrimitiveApiTest.testMultiProduce(PrimitiveApiTest.scala:279)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:164)
	at junit.framework.TestCase.runBare(TestCase.java:130)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:120)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[info] Test Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
[info] == core-kafka / kafka.integration.PrimitiveApiTest ==
[info] 
[info] == core-kafka / kafka.api.RequestResponseSerializationTest ==
[info] Test Starting: testSerializationAndDeserialization
[info] Test Passed: testSerializationAndDeserialization
[info] == core-kafka / kafka.api.RequestResponseSerializationTest ==
[info] 
[info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testEquals
[info] Test Passed: testEquals
[info] Test Starting: testWriteTo
[info] Test Passed: testWriteTo
[info] Test Starting: testValidBytes
[info] Test Passed: testValidBytes
[info] Test Starting: testValidBytesWithCompression
[info] Test Passed: testValidBytesWithCompression
[info] Test Starting: testIterator
[info] Test Passed: testIterator
[info] Test Starting: testOffsetAssignment
[info] Test Passed: testOffsetAssignment
[info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /tmp/sbt_f00e324e
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 166, Failed 2, Errors 0, Passed 164, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running kafka.integration.PrimitiveApiTest: Test FAILED
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 266 s, completed Nov 17, 2012 2:12:11 AM
[info] 
[info] Total session time: 266 s, completed Nov 17, 2012 2:12:11 AM
[error] Error during build.
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: Kafka-0.8 #114

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/114/changes>

Changes:

[jkreps] KAFKA-544. Follow-up items on key-retention. Addresses misc. comments from Joel, see ticket for details.

------------------------------------------
[...truncated 2024 lines...]
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:164)
	at junit.framework.TestCase.runBare(TestCase.java:130)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:120)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[info] == core-kafka / kafka.admin.AdminTest ==
[info] 
[info] == core-kafka / kafka.integration.AutoOffsetResetTest ==
[info] Test Starting: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] == core-kafka / kafka.integration.AutoOffsetResetTest ==
[info] 
[info] == core-kafka / kafka.server.ServerShutdownTest ==
[info] Test Starting: testCleanShutdown(kafka.server.ServerShutdownTest)
[info] Test Passed: testCleanShutdown(kafka.server.ServerShutdownTest)
[info] == core-kafka / kafka.server.ServerShutdownTest ==
[info] 
[info] == core-kafka / kafka.producer.AsyncProducerTest ==
[info] Test Starting: testProducerQueueSize(kafka.producer.AsyncProducerTest)
[info] Test Passed: testProducerQueueSize(kafka.producer.AsyncProducerTest)
[info] Test Starting: testProduceAfterClosed(kafka.producer.AsyncProducerTest)
[info] Test Passed: testProduceAfterClosed(kafka.producer.AsyncProducerTest)
[info] Test Starting: testBatchSize(kafka.producer.AsyncProducerTest)
[info] Test Passed: testBatchSize(kafka.producer.AsyncProducerTest)
[info] Test Starting: testQueueTimeExpired(kafka.producer.AsyncProducerTest)
[info] Test Passed: testQueueTimeExpired(kafka.producer.AsyncProducerTest)
[info] Test Starting: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)
[info] Test Passed: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)
[info] Test Starting: testSerializeEvents(kafka.producer.AsyncProducerTest)
[info] Test Passed: testSerializeEvents(kafka.producer.AsyncProducerTest)
[info] Test Starting: testInvalidPartition(kafka.producer.AsyncProducerTest)
[info] Test Passed: testInvalidPartition(kafka.producer.AsyncProducerTest)
[info] Test Starting: testNoBroker(kafka.producer.AsyncProducerTest)
[info] Test Passed: testNoBroker(kafka.producer.AsyncProducerTest)
[info] Test Starting: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)
[info] Test Passed: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)
[info] Test Starting: testRandomPartitioner(kafka.producer.AsyncProducerTest)
[info] Test Passed: testRandomPartitioner(kafka.producer.AsyncProducerTest)
[info] Test Starting: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)
[info] Test Passed: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)
[info] Test Starting: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)
[info] Test Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)
[info] Test Starting: testJavaProducer(kafka.producer.AsyncProducerTest)
[info] Test Passed: testJavaProducer(kafka.producer.AsyncProducerTest)
[info] Test Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] Test Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] == core-kafka / kafka.producer.AsyncProducerTest ==
[info] 
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] Test Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] Test Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] 
[info] == core-kafka / kafka.message.MessageCompressionTest ==
[info] Test Starting: testSimpleCompressDecompress
java.lang.reflect.InvocationTargetException
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.xerial.snappy.SnappyLoader.loadNativeLibrary(SnappyLoader.java:317)
	at org.xerial.snappy.SnappyLoader.load(SnappyLoader.java:219)
	at org.xerial.snappy.Snappy.<clinit>(Snappy.java:44)
	at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:79)
	at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:66)
	at kafka.message.MessageCompressionTest.isSnappyAvailable(MessageCompressionTest.scala:57)
	at kafka.message.MessageCompressionTest.testSimpleCompressDecompress(MessageCompressionTest.scala:31)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at org.junit.internal.runners.TestMethodRunner.executeMethodBody(TestMethodRunner.java:99)
	at org.junit.internal.runners.TestMethodRunner.runUnprotected(TestMethodRunner.java:81)
	at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
	at org.junit.internal.runners.TestMethodRunner.runMethod(TestMethodRunner.java:75)
	at org.junit.internal.runners.TestMethodRunner.run(TestMethodRunner.java:45)
	at org.junit.internal.runners.TestClassMethodsRunner.invokeTestMethod(TestClassMethodsRunner.java:71)
	at org.junit.internal.runners.TestClassMethodsRunner.run(TestClassMethodsRunner.java:35)
	at org.junit.internal.runners.TestClassRunner$1.runUnprotected(TestClassRunner.java:42)
	at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
	at org.junit.internal.runners.TestClassRunner.run(TestClassRunner.java:52)
	at org.junit.internal.runners.CompositeRunner.run(CompositeRunner.java:29)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:121)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:100)
	at org.junit.runner.JUnitCore.run(JUnitCore.java:91)
	at org.scalatest.junit.JUnitSuite$class.run(JUnitSuite.scala:261)
	at kafka.message.MessageCompressionTest.run(MessageCompressionTest.scala:26)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.lang.UnsatisfiedLinkError: no snappyjava in java.library.path
	at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1734)
	at java.lang.Runtime.loadLibrary0(Runtime.java:823)
	at java.lang.System.loadLibrary(System.java:1028)
	at org.xerial.snappy.SnappyNativeLoader.loadLibrary(SnappyNativeLoader.java:52)
	... 51 more
[info] Test Passed: testSimpleCompressDecompress
[info] Test Starting: testComplexCompressDecompress
[info] Test Passed: testComplexCompressDecompress
[info] == core-kafka / kafka.message.MessageCompressionTest ==
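(The InvocationTargetException above is expected on hosts without the Snappy native library: the frame at MessageCompressionTest.isSnappyAvailable suggests the test probes for Snappy by constructing a SnappyOutputStream and treats a native-load failure as "Snappy unavailable", which is why testSimpleCompressDecompress still passes despite the printed stack trace. A minimal sketch of such a probe follows; this is an illustration only, and the actual helper in the Kafka source may differ.)

    import java.io.ByteArrayOutputStream
    import org.xerial.snappy.SnappyOutputStream

    // Constructing a SnappyOutputStream forces snappy-java to load its native
    // library; when no snappyjava is on java.library.path this surfaces as an
    // UnsatisfiedLinkError (or a linkage/initializer error, depending on the
    // snappy-java version), so any failure here means "Snappy unavailable".
    def isSnappyAvailable: Boolean =
      try {
        new SnappyOutputStream(new ByteArrayOutputStream())
        true
      } catch {
        case _: Throwable => false
      }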
[info] 
[info] == core-kafka / kafka.log.LogTest ==
[info] Test Starting: testTimeBasedLogRoll
[info] Test Passed: testTimeBasedLogRoll
[info] Test Starting: testSizeBasedLogRoll
[info] Test Passed: testSizeBasedLogRoll
[info] Test Starting: testLoadEmptyLog
[info] Test Passed: testLoadEmptyLog
[info] Test Starting: testAppendAndRead
[info] Test Passed: testAppendAndRead
[info] Test Starting: testReadOutOfRange
[info] Test Passed: testReadOutOfRange
[info] Test Starting: testLogRolls
[info] Test Passed: testLogRolls
[info] Test Starting: testCompressedMessages
[info] Test Passed: testCompressedMessages
[info] Test Starting: testFindSegment
[info] Test Passed: testFindSegment
[info] Test Starting: testEdgeLogRollsStartingAtZero
[info] Test Passed: testEdgeLogRollsStartingAtZero
[info] Test Starting: testEdgeLogRollsStartingAtNonZero
[info] Test Passed: testEdgeLogRollsStartingAtNonZero
[info] Test Starting: testMessageSizeCheck
[info] Test Passed: testMessageSizeCheck
[info] Test Starting: testLogRecoversToCorrectOffset
[info] Test Passed: testLogRecoversToCorrectOffset
[info] Test Starting: testTruncateTo
[info] Test Passed: testTruncateTo
[info] Test Starting: testIndexResizingAtTruncation
[info] Test Passed: testIndexResizingAtTruncation
[info] Test Starting: testAppendWithoutOffsetAssignment
[info] Test Passed: testAppendWithoutOffsetAssignment
[info] Test Starting: testReopenThenTruncate
[info] Test Passed: testReopenThenTruncate
[info] == core-kafka / kafka.log.LogTest ==
[info] 
[info] == core-kafka / kafka.log.FileMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testWriteTo
[info] Test Passed: testWriteTo
[info] Test Starting: testFileSize
[info] Test Passed: testFileSize
[info] Test Starting: testIterationOverPartialAndTruncation
[info] Test Passed: testIterationOverPartialAndTruncation
[info] Test Starting: testIterationDoesntChangePosition
[info] Test Passed: testIterationDoesntChangePosition
[info] Test Starting: testRead
[info] Test Passed: testRead
[info] Test Starting: testSearch
[info] Test Passed: testSearch
[info] == core-kafka / kafka.log.FileMessageSetTest ==
[info] 
[info] == core-kafka / kafka.server.ReplicaFetchTest ==
[info] Test Starting: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)
[info] Test Passed: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)
[info] == core-kafka / kafka.server.ReplicaFetchTest ==
[info] 
[info] == core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testEquals
[info] Test Passed: testEquals
[info] Test Starting: testIteratorIsConsistentWithCompression
[info] Test Passed: testIteratorIsConsistentWithCompression
[info] Test Starting: testSizeInBytesWithCompression
[info] Test Passed: testSizeInBytesWithCompression
[info] Test Starting: testEqualsWithCompression
[info] Test Passed: testEqualsWithCompression
[info] == core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 167, Failed 1, Errors 0, Passed 166, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /var/tmp/sbt_32c34d6e
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[error] Error running kafka.admin.AdminTest: Test FAILED
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 305 s, completed Nov 26, 2012 9:09:22 PM
[info] 
[info] Total session time: 306 s, completed Nov 26, 2012 9:09:22 PM
[error] Error during build.
Build step 'Execute shell' marked build as failure

Re: Build failed in Jenkins: Kafka-0.8 #113

Posted by Jay Kreps <ja...@gmail.com>.
Hey Jun,

I think AdminTest.testShutdownBroker started failing consistently after
this checkin.

-Jay


On Sun, Nov 18, 2012 at 10:09 PM, Apache Jenkins Server <
jenkins@builds.apache.org> wrote:

> See <https://builds.apache.org/job/Kafka-0.8/113/changes>
>
> Changes:
>
> [junrao] move shutting down of fetcher thread out of critical path;
> patched by Jun Rao; reviewed by Neha Narkhede; KAFKA-612
>
> ------------------------------------------
> [...truncated 2862 lines...]
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:14,085] ERROR [KafkaApi-0] error when processing request
> (test1,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1
> doesn't exist on 0
>         at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [info] Test Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testMultiProduce(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [2012-11-19 06:09:18,635] ERROR [KafkaApi-0] error when processing request
> (test2,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
>         at kafka.log.Log.read(Log.scala:371)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request
> (test3,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
>         at kafka.log.Log.read(Log.scala:371)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request
> (test4,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
>         at kafka.log.Log.read(Log.scala:371)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,637] ERROR [KafkaApi-0] error when processing request
> (test1,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
>         at kafka.log.Log.read(Log.scala:371)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,639] ERROR [KafkaApi-0] error when processing request
> (test2,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test2 partition -1
> doesn't exist on 0
>         at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request
> (test3,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test3 partition -1
> doesn't exist on 0
>         at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request
> (test4,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test4 partition -1
> doesn't exist on 0
>         at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,641] ERROR [KafkaApi-0] error when processing request
> (test1,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1
> doesn't exist on 0
>         at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
>         at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
>         at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
>         at scala.collection.immutable.Map$Map4.map(Map.scala:157)
>         at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
>         at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
>         at java.lang.Thread.run(Thread.java:662)
> [info] Test Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
> [info] Test Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
> [info] Test Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
> [info] == core-kafka / kafka.integration.PrimitiveApiTest ==
> [info]
> [info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
> [info] Test Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
> log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
> log4j:WARN Please initialize the log4j system properly.
> [info] Test Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
> [info] Test Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
> [info] Test Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
> [info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
> [info]
> [info] == core-kafka / kafka.metrics.KafkaTimerTest ==
> [info] Test Starting: testKafkaTimer(kafka.metrics.KafkaTimerTest)
> [info] Test Passed: testKafkaTimer(kafka.metrics.KafkaTimerTest)
> [info] == core-kafka / kafka.metrics.KafkaTimerTest ==
> [info]
> [info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
> [info] Test Starting: testWrittenEqualsRead
> [info] Test Passed: testWrittenEqualsRead
> [info] Test Starting: testIteratorIsConsistent
> [info] Test Passed: testIteratorIsConsistent
> [info] Test Starting: testSizeInBytes
> [info] Test Passed: testSizeInBytes
> [info] Test Starting: testEquals
> [info] Test Passed: testEquals
> [info] Test Starting: testWriteTo
> [info] Test Passed: testWriteTo
> [info] Test Starting: testValidBytes
> [info] Test Passed: testValidBytes
> [info] Test Starting: testValidBytesWithCompression
> [info] Test Passed: testValidBytesWithCompression
> [info] Test Starting: testIterator
> [info] Test Passed: testIterator
> [info] Test Starting: testOffsetAssignment
> [info] Test Passed: testOffsetAssignment
> [info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
> [info]
> [info] == core-kafka / kafka.producer.ProducerTest ==
> [info] Test Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
> [info] Test Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
> [info] Test Starting: testSendToNewTopic(kafka.producer.ProducerTest)
> [info] Test Passed: testSendToNewTopic(kafka.producer.ProducerTest)
> [info] Test Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)
> [info] Test Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)
> [info] Test Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
> [info] Test Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
> [info] == core-kafka / kafka.producer.ProducerTest ==
> [info]
> [info] == core-kafka / kafka.integration.FetcherTest ==
> [info] Test Starting: testFetcher(kafka.integration.FetcherTest)
> [info] Test Passed: testFetcher(kafka.integration.FetcherTest)
> [info] == core-kafka / kafka.integration.FetcherTest ==
> [info]
> [info] == core-kafka / Test cleanup 1 ==
> [info] Deleting directory /tmp/sbt_b5aaac46
> [info] == core-kafka / Test cleanup 1 ==
> [info]
> [info] == core-kafka / test-finish ==
> [error] Failed: : Total 167, Failed 2, Errors 0, Passed 165, Skipped 0
> [info] == core-kafka / test-finish ==
> [info]
> [info] == core-kafka / test-cleanup ==
> [info] == core-kafka / test-cleanup ==
> [info]
> [info] == hadoop consumer / copy-test-resources ==
> [info] == hadoop consumer / copy-test-resources ==
> [error] Error running kafka.server.LogRecoveryTest: Test FAILED
> [error] Error running kafka.admin.AdminTest: Test FAILED
> [error] Error running test: One or more subtasks failed
> [info]
> [info] Total time: 233 s, completed Nov 19, 2012 6:09:42 AM
> [info]
> [info] Total session time: 233 s, completed Nov 19, 2012 6:09:42 AM
> [error] Error during build.
> Build step 'Execute shell' marked build as failure
>

Build failed in Jenkins: Kafka-0.8 #113

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/113/changes>

Changes:

[junrao] move shutting down of fetcher thread out of critical path; patched by Jun Rao; reviewed by Neha Narkhede; KAFKA-612

------------------------------------------
[...truncated 2862 lines...]
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:14,085] ERROR [KafkaApi-0] error when processing request (test1,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 doesn't exist on 0
	at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[info] Test Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testMultiProduce(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
[2012-11-19 06:09:18,635] ERROR [KafkaApi-0] error when processing request (test2,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
	at kafka.log.Log.read(Log.scala:371)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request (test3,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
	at kafka.log.Log.read(Log.scala:371)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request (test4,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
	at kafka.log.Log.read(Log.scala:371)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,637] ERROR [KafkaApi-0] error when processing request (test1,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
	at kafka.log.Log.read(Log.scala:371)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,639] ERROR [KafkaApi-0] error when processing request (test2,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test2 partition -1 doesn't exist on 0
	at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request (test3,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test3 partition -1 doesn't exist on 0
	at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request (test4,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test4 partition -1 doesn't exist on 0
	at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,641] ERROR [KafkaApi-0] error when processing request (test1,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 doesn't exist on 0
	at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
	at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
	at scala.collection.immutable.Map$Map4.map(Map.scala:157)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
	at java.lang.Thread.run(Thread.java:662)
[info] Test Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)
[info] Test Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
[info] Test Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)
[info] == core-kafka / kafka.integration.PrimitiveApiTest ==
[info] 
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] Test Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[info] Test Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] 
[info] == core-kafka / kafka.metrics.KafkaTimerTest ==
[info] Test Starting: testKafkaTimer(kafka.metrics.KafkaTimerTest)
[info] Test Passed: testKafkaTimer(kafka.metrics.KafkaTimerTest)
[info] == core-kafka / kafka.metrics.KafkaTimerTest ==
[info] 
[info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testEquals
[info] Test Passed: testEquals
[info] Test Starting: testWriteTo
[info] Test Passed: testWriteTo
[info] Test Starting: testValidBytes
[info] Test Passed: testValidBytes
[info] Test Starting: testValidBytesWithCompression
[info] Test Passed: testValidBytesWithCompression
[info] Test Starting: testIterator
[info] Test Passed: testIterator
[info] Test Starting: testOffsetAssignment
[info] Test Passed: testOffsetAssignment
[info] == core-kafka / kafka.message.ByteBufferMessageSetTest ==
[info] 
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] Test Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
[info] Test Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
[info] Test Starting: testSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Passed: testSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] Test Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] 
[info] == core-kafka / kafka.integration.FetcherTest ==
[info] Test Starting: testFetcher(kafka.integration.FetcherTest)
[info] Test Passed: testFetcher(kafka.integration.FetcherTest)
[info] == core-kafka / kafka.integration.FetcherTest ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /tmp/sbt_b5aaac46
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 167, Failed 2, Errors 0, Passed 165, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[info] 
[info] == hadoop consumer / copy-test-resources ==
[info] == hadoop consumer / copy-test-resources ==
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running kafka.admin.AdminTest: Test FAILED
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 233 s, completed Nov 19, 2012 6:09:42 AM
[info] 
[info] Total session time: 233 s, completed Nov 19, 2012 6:09:42 AM
[error] Error during build.
Build step 'Execute shell' marked build as failure

Build failed in Jenkins: Kafka-0.8 #112

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/112/changes>

Changes:

[nehanarkhede] KAFKA-532 Multiple controllers can co-exist during soft failures; patched by Neha Narkhede; reviewed by Jun Rao

------------------------------------------
[...truncated 1398 lines...]
[info] Test Starting: testReadOnEmptySegment(kafka.log.LogSegmentTest)
[info] Test Passed: testReadOnEmptySegment(kafka.log.LogSegmentTest)
[info] Test Starting: testReadBeforeFirstOffset(kafka.log.LogSegmentTest)
[info] Test Passed: testReadBeforeFirstOffset(kafka.log.LogSegmentTest)
[info] Test Starting: testReadSingleMessage(kafka.log.LogSegmentTest)
[info] Test Passed: testReadSingleMessage(kafka.log.LogSegmentTest)
[info] Test Starting: testReadAfterLast(kafka.log.LogSegmentTest)
[info] Test Passed: testReadAfterLast(kafka.log.LogSegmentTest)
[info] Test Starting: testReadFromGap(kafka.log.LogSegmentTest)
[info] Test Passed: testReadFromGap(kafka.log.LogSegmentTest)
[info] Test Starting: testTruncate(kafka.log.LogSegmentTest)
[info] Test Passed: testTruncate(kafka.log.LogSegmentTest)
[info] Test Starting: testTruncateFull(kafka.log.LogSegmentTest)
[info] Test Passed: testTruncateFull(kafka.log.LogSegmentTest)
[info] Test Starting: testNextOffsetCalculation(kafka.log.LogSegmentTest)
[info] Test Passed: testNextOffsetCalculation(kafka.log.LogSegmentTest)
[info] == core-kafka / kafka.log.LogSegmentTest ==
[info] 
[info] == core-kafka / kafka.server.RequestPurgatoryTest ==
[info] Test Starting: testRequestSatisfaction(kafka.server.RequestPurgatoryTest)
[info] Test Passed: testRequestSatisfaction(kafka.server.RequestPurgatoryTest)
[info] Test Starting: testRequestExpiry(kafka.server.RequestPurgatoryTest)
[info] Test Passed: testRequestExpiry(kafka.server.RequestPurgatoryTest)
[info] == core-kafka / kafka.server.RequestPurgatoryTest ==
[info] 
[info] == core-kafka / kafka.server.HighwatermarkPersistenceTest ==
[info] Test Starting: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)
[info] Test Passed: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)
[info] Test Starting: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)
[info] Test Passed: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)
[info] == core-kafka / kafka.server.HighwatermarkPersistenceTest ==
[info] 
[info] == core-kafka / kafka.log.LogOffsetTest ==
[info] Test Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)
[info] Test Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)
[info] Test Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)
[info] Test Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)
[info] Test Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)
[info] == core-kafka / kafka.log.LogOffsetTest ==
[info] 
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] Test Starting: simpleRequest
[info] Test Passed: simpleRequest
[info] Test Starting: tooBigRequestIsRejected
[info] Test Passed: tooBigRequestIsRejected
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] 
[info] == core-kafka / kafka.consumer.TopicFilterTest ==
[info] Test Starting: testWhitelists
[info] Test Passed: testWhitelists
[info] Test Starting: testBlacklists
[info] Test Passed: testBlacklists
[info] == core-kafka / kafka.consumer.TopicFilterTest ==
[info] 
[info] == core-kafka / kafka.integration.FetcherTest ==
[info] Test Starting: testFetcher(kafka.integration.FetcherTest)
[info] Test Passed: testFetcher(kafka.integration.FetcherTest)
[info] == core-kafka / kafka.integration.FetcherTest ==
[info] 
[info] == core-kafka / kafka.utils.UtilsTest ==
[info] Test Starting: testSwallow
[info] Test Passed: testSwallow
[info] Test Starting: testCircularIterator
[info] Test Passed: testCircularIterator
[info] Test Starting: testReadBytes
[info] Test Passed: testReadBytes
[info] == core-kafka / kafka.utils.UtilsTest ==
[info] 
[info] == core-kafka / kafka.log.LogManagerTest ==
[info] Test Starting: testCreateLog(kafka.log.LogManagerTest)
[info] Test Passed: testCreateLog(kafka.log.LogManagerTest)
[info] Test Starting: testGetLog(kafka.log.LogManagerTest)
[info] Test Passed: testGetLog(kafka.log.LogManagerTest)
[info] Test Starting: testCleanupExpiredSegments(kafka.log.LogManagerTest)
[info] Test Passed: testCleanupExpiredSegments(kafka.log.LogManagerTest)
[info] Test Starting: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)
[info] Test Passed: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)
[info] Test Starting: testTimeBasedFlush(kafka.log.LogManagerTest)
[info] Test Passed: testTimeBasedFlush(kafka.log.LogManagerTest)
[info] Test Starting: testLeastLoadedAssignment(kafka.log.LogManagerTest)
[info] Test Passed: testLeastLoadedAssignment(kafka.log.LogManagerTest)
[info] Test Starting: testTwoLogManagersUsingSameDirFails(kafka.log.LogManagerTest)
[info] Test Passed: testTwoLogManagersUsingSameDirFails(kafka.log.LogManagerTest)
[info] == core-kafka / kafka.log.LogManagerTest ==
[info] 
[info] == core-kafka / kafka.message.MessageTest ==
[info] Test Starting: testFieldValues
[info] Test Passed: testFieldValues
[info] Test Starting: testChecksum
[info] Test Passed: testChecksum
[info] Test Starting: testEquality
[info] Test Passed: testEquality
[info] Test Starting: testIsHashable
[info] Test Passed: testIsHashable
[info] == core-kafka / kafka.message.MessageTest ==
[info] 
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] Test Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] Test Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] 
[info] == core-kafka / kafka.integration.TopicMetadataTest ==
[info] Test Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)
[info] Test Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)
[info] Test Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)
[info] Test Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)
[info] Test Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)
[info] Test Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)
[info] == core-kafka / kafka.integration.TopicMetadataTest ==
[info] 
[info] == core-kafka / kafka.server.IsrExpirationTest ==
[info] Test Starting: testIsrExpirationForStuckFollowers(kafka.server.IsrExpirationTest)
[info] Test Passed: testIsrExpirationForStuckFollowers(kafka.server.IsrExpirationTest)
[info] Test Starting: testIsrExpirationForSlowFollowers(kafka.server.IsrExpirationTest)
[info] Test Passed: testIsrExpirationForSlowFollowers(kafka.server.IsrExpirationTest)
[info] == core-kafka / kafka.server.IsrExpirationTest ==
[info] 
[info] == core-kafka / kafka.integration.AutoOffsetResetTest ==
[info] Test Starting: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] == core-kafka / kafka.integration.AutoOffsetResetTest ==
[info] 
[info] == core-kafka / kafka.metrics.KafkaTimerTest ==
[info] Test Starting: testKafkaTimer(kafka.metrics.KafkaTimerTest)
[info] Test Passed: testKafkaTimer(kafka.metrics.KafkaTimerTest)
[info] == core-kafka / kafka.metrics.KafkaTimerTest ==
[info] 
[info] == core-kafka / kafka.server.SimpleFetchTest ==
[info] Test Starting: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)
[info] Test Passed: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)
[info] Test Starting: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)
[info] Test Passed: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)
[info] == core-kafka / kafka.server.SimpleFetchTest ==
[info] 
[info] == core-kafka / kafka.message.MessageCompressionTest ==
[info] Test Starting: testSimpleCompressDecompress
[info] Test Passed: testSimpleCompressDecompress
[info] Test Starting: testComplexCompressDecompress
[info] Test Passed: testComplexCompressDecompress
[info] == core-kafka / kafka.message.MessageCompressionTest ==
[info] 
[info] == core-kafka / kafka.api.RequestResponseSerializationTest ==
[info] Test Starting: testSerializationAndDeserialization
[info] Test Passed: testSerializationAndDeserialization
[info] == core-kafka / kafka.api.RequestResponseSerializationTest ==
[info] 
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] Test Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
[info] Test Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)
[info] Test Starting: testSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Passed: testSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] Test Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] 
[info] == core-kafka / kafka.server.LeaderElectionTest ==
[info] Test Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)
[info] Test Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)
[info] Test Starting: testLeaderElectionWithStaleControllerEpoch(kafka.server.LeaderElectionTest)
[info] Test Passed: testLeaderElectionWithStaleControllerEpoch(kafka.server.LeaderElectionTest)
[info] == core-kafka / kafka.server.LeaderElectionTest ==
[info] 
[info] == core-kafka / kafka.integration.LazyInitProducerTest ==
[info] Test Starting: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)
[info] Test Passed: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)
[info] Test Starting: testMultiProduce(kafka.integration.LazyInitProducerTest)
[info] Test Passed: testMultiProduce(kafka.integration.LazyInitProducerTest)
[info] Test Starting: testProduceAndFetch(kafka.integration.LazyInitProducerTest)
[info] Test Passed: testProduceAndFetch(kafka.integration.LazyInitProducerTest)
[info] Test Starting: testMultiProduceResend(kafka.integration.LazyInitProducerTest)
[info] Test Passed: testMultiProduceResend(kafka.integration.LazyInitProducerTest)
[info] == core-kafka / kafka.integration.LazyInitProducerTest ==
[info] 
[info] == core-kafka / kafka.consumer.ConsumerIteratorTest ==
[info] Test Starting: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)
[info] Test Passed: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)
[info] == core-kafka / kafka.consumer.ConsumerIteratorTest ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /tmp/sbt_9ee7511c
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 167, Failed 1, Errors 0, Passed 166, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[info] 
[info] == java-examples / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == java-examples / test-compile ==
[info] 
[info] == perf / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == perf / test-compile ==
[info] 
[info] == hadoop producer / copy-resources ==
[info] == hadoop producer / copy-resources ==
[info] 
[info] == hadoop consumer / copy-test-resources ==
[info] == hadoop consumer / copy-test-resources ==
[info] 
[info] == hadoop producer / copy-test-resources ==
[info] == hadoop producer / copy-test-resources ==
[info] 
[info] == hadoop consumer / copy-resources ==
[info] == hadoop consumer / copy-resources ==
[info] 
[info] == perf / copy-test-resources ==
[info] == perf / copy-test-resources ==
[info] 
[info] == java-examples / copy-resources ==
[info] == java-examples / copy-resources ==
[info] 
[info] == java-examples / copy-test-resources ==
[info] == java-examples / copy-test-resources ==
[info] 
[info] == perf / copy-resources ==
[info] == perf / copy-resources ==
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running compile: javac returned nonzero exit code
[error] Error running compile: javac returned nonzero exit code
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 291 s, completed Nov 19, 2012 12:17:59 AM
[info] 
[info] Total session time: 291 s, completed Nov 19, 2012 12:17:59 AM
[error] Error during build.
Build step 'Execute shell' marked build as failure