You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kafka.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/11/17 03:12:12 UTC
Build failed in Jenkins: Kafka-0.8 #111
See <https://builds.apache.org/job/Kafka-0.8/111/changes>
Changes:
[jjkoshy] Fix deadlock between leader-finder-thread and consumer-fetcher-thread during broker failure; patched by Joel Koshy; reviewed by Jun Rao; KAFKA-618
pre-commit-status-crumb=5e65bf7a-f347-4600-b3ae-99eed1cd2a78
[junrao] MigrationTool should disable shallow iteration in the 0.7 consumer; patched by Yang Ye; reviewed by Jun Rao; KAFKA-613
------------------------------------------
[...truncated 3115 lines...]
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.net.ConnectException: Connection refused
at sun.nio.ch.Net.connect(Native Method)
at sun.nio.ch.SocketChannelImpl.connect(SocketChannelImpl.java:500)
at kafka.network.BlockingChannel.connect(BlockingChannel.scala:57)
at kafka.producer.SyncProducer.connect(SyncProducer.scala:135)
at kafka.producer.SyncProducer.getOrMakeConnection(SyncProducer.scala:150)
at kafka.producer.SyncProducer.kafka$producer$SyncProducer$$doSend(SyncProducer.scala:71)
at kafka.producer.SyncProducer.send(SyncProducer.scala:101)
at kafka.client.ClientUtils$.fetchTopicMetadata(ClientUtils.scala:25)
... 42 more
[0m[[0minfo[0m] [0mTest Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[2012-11-17 02:11:49,815] ERROR Closing socket for /127.0.0.1 because of error (kafka.network.Processor:102)
java.io.IOException: Connection reset by peer
at sun.nio.ch.FileDispatcher.read0(Native Method)
at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
at sun.nio.ch.IOUtil.read(IOUtil.java:171)
at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
at kafka.utils.Utils$.read(Utils.scala:393)
at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
at kafka.network.Processor.read(SocketServer.scala:293)
at kafka.network.Processor.run(SocketServer.scala:209)
at java.lang.Thread.run(Thread.java:662)
[2012-11-17 02:11:49,815] ERROR Closing socket for /127.0.0.1 because of error (kafka.network.Processor:102)
java.io.IOException: Connection reset by peer
at sun.nio.ch.FileDispatcher.write0(Native Method)
at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:69)
at sun.nio.ch.IOUtil.write(IOUtil.java:40)
at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
at kafka.api.PartitionDataSend.writeTo(FetchResponse.scala:76)
at kafka.network.MultiSend.writeTo(Transmission.scala:94)
at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
at kafka.api.TopicDataSend.writeTo(FetchResponse.scala:133)
at kafka.network.MultiSend.writeTo(Transmission.scala:94)
at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
at kafka.api.FetchResponseSend.writeTo(FetchResponse.scala:232)
at kafka.network.Processor.write(SocketServer.scala:318)
at kafka.network.Processor.run(SocketServer.scala:211)
at java.lang.Thread.run(Thread.java:662)
[2012-11-17 02:11:50,118] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x13b0c22d6da0009, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[0m[[0minfo[0m] [0mTest Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testFileSize[0m
[0m[[0minfo[0m] [0mTest Passed: testFileSize[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Starting: testRead[0m
[0m[[0minfo[0m] [0mTest Passed: testRead[0m
[0m[[0minfo[0m] [0mTest Starting: testSearch[0m
[0m[[0minfo[0m] [0mTest Passed: testSearch[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ConsumerIteratorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ConsumerIteratorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)[0m
[error] Test Failed: testMultiProduce(kafka.integration.PrimitiveApiTest)
junit.framework.AssertionFailedError: expected:<List(a_test4, b_test4)> but was:<List(a_test4, b_test4, a_test4, b_test4)>
at junit.framework.Assert.fail(Assert.java:47)
at junit.framework.Assert.failNotEquals(Assert.java:277)
at junit.framework.Assert.assertEquals(Assert.java:64)
at junit.framework.Assert.assertEquals(Assert.java:71)
at kafka.integration.PrimitiveApiTest$$anonfun$testMultiProduce$2.apply(PrimitiveApiTest.scala:281)
at kafka.integration.PrimitiveApiTest$$anonfun$testMultiProduce$2.apply(PrimitiveApiTest.scala:279)
at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
at scala.collection.immutable.List.foreach(List.scala:45)
at kafka.integration.PrimitiveApiTest.testMultiProduce(PrimitiveApiTest.scala:279)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [0mTest Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.api.RequestResponseSerializationTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.api.RequestResponseSerializationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testIterator[0m
[0m[[0minfo[0m] [0mTest Starting: testOffsetAssignment[0m
[0m[[0minfo[0m] [0mTest Passed: testOffsetAssignment[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_f00e324e[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[error] Failed: : Total 166, Failed 2, Errors 0, Passed 164, Skipped 0
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running kafka.integration.PrimitiveApiTest: Test FAILED
[error] Error running test: One or more subtasks failed
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 266 s, completed Nov 17, 2012 2:12:11 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 266 s, completed Nov 17, 2012 2:12:11 AM[0m
[error] Error during build.
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #114
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/114/changes>
Changes:
[jkreps] KAFKA-544. Follow-up items on key-retention. Addresses misc. comments from Joel, see ticket for details.
------------------------------------------
[...truncated 2024 lines...]
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at junit.framework.TestCase.runTest(TestCase.java:164)
at junit.framework.TestCase.runBare(TestCase.java:130)
at junit.framework.TestResult$1.protect(TestResult.java:110)
at junit.framework.TestResult.runProtected(TestResult.java:128)
at junit.framework.TestResult.run(TestResult.java:113)
at junit.framework.TestCase.run(TestCase.java:120)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at junit.framework.TestSuite.runTest(TestSuite.java:228)
at junit.framework.TestSuite.run(TestSuite.java:223)
at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[0m[[0minfo[0m] [34m== core-kafka / kafka.admin.AdminTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ServerShutdownTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanShutdown(kafka.server.ServerShutdownTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanShutdown(kafka.server.ServerShutdownTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ServerShutdownTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProducerQueueSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProducerQueueSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAfterClosed(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBatchSize(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testQueueTimeExpired(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testPartitionAndCollateEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializeEvents(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidPartition(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNoBroker(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testRandomPartitioner(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testJavaProducer(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.AsyncProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageCompressionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSimpleCompressDecompress[0m
java.lang.reflect.InvocationTargetException
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.xerial.snappy.SnappyLoader.loadNativeLibrary(SnappyLoader.java:317)
at org.xerial.snappy.SnappyLoader.load(SnappyLoader.java:219)
at org.xerial.snappy.Snappy.<clinit>(Snappy.java:44)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:79)
at org.xerial.snappy.SnappyOutputStream.<init>(SnappyOutputStream.java:66)
at kafka.message.MessageCompressionTest.isSnappyAvailable(MessageCompressionTest.scala:57)
at kafka.message.MessageCompressionTest.testSimpleCompressDecompress(MessageCompressionTest.scala:31)
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
at java.lang.reflect.Method.invoke(Method.java:597)
at org.junit.internal.runners.TestMethodRunner.executeMethodBody(TestMethodRunner.java:99)
at org.junit.internal.runners.TestMethodRunner.runUnprotected(TestMethodRunner.java:81)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestMethodRunner.runMethod(TestMethodRunner.java:75)
at org.junit.internal.runners.TestMethodRunner.run(TestMethodRunner.java:45)
at org.junit.internal.runners.TestClassMethodsRunner.invokeTestMethod(TestClassMethodsRunner.java:71)
at org.junit.internal.runners.TestClassMethodsRunner.run(TestClassMethodsRunner.java:35)
at org.junit.internal.runners.TestClassRunner$1.runUnprotected(TestClassRunner.java:42)
at org.junit.internal.runners.BeforeAndAfterRunner.runProtected(BeforeAndAfterRunner.java:34)
at org.junit.internal.runners.TestClassRunner.run(TestClassRunner.java:52)
at org.junit.internal.runners.CompositeRunner.run(CompositeRunner.java:29)
at org.junit.runner.JUnitCore.run(JUnitCore.java:121)
at org.junit.runner.JUnitCore.run(JUnitCore.java:100)
at org.junit.runner.JUnitCore.run(JUnitCore.java:91)
at org.scalatest.junit.JUnitSuite$class.run(JUnitSuite.scala:261)
at kafka.message.MessageCompressionTest.run(MessageCompressionTest.scala:26)
at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
at sbt.TestRunner.run(TestFramework.scala:53)
at sbt.TestRunner.runTest$1(TestFramework.scala:67)
at sbt.TestRunner.run(TestFramework.scala:76)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
at sbt.NamedTestTask.run(TestFramework.scala:92)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
at sbt.impl.RunTask.runTask(RunTask.scala:85)
at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
at sbt.Control$.trapUnit(Control.scala:19)
at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
Caused by: java.lang.UnsatisfiedLinkError: no snappyjava in java.library.path
at java.lang.ClassLoader.loadLibrary(ClassLoader.java:1734)
at java.lang.Runtime.loadLibrary0(Runtime.java:823)
at java.lang.System.loadLibrary(System.java:1028)
at org.xerial.snappy.SnappyNativeLoader.loadLibrary(SnappyNativeLoader.java:52)
... 51 more
[0m[[0minfo[0m] [0mTest Passed: testSimpleCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Starting: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Passed: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageCompressionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeBasedLogRoll[0m
[0m[[0minfo[0m] [0mTest Starting: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Passed: testLoadEmptyLog[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendAndRead[0m
[0m[[0minfo[0m] [0mTest Starting: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOutOfRange[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRolls[0m
[0m[[0minfo[0m] [0mTest Starting: testCompressedMessages[0m
[0m[[0minfo[0m] [0mTest Passed: testCompressedMessages[0m
[0m[[0minfo[0m] [0mTest Starting: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Passed: testFindSegment[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRollsStartingAtZero[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRollsStartingAtZero[0m
[0m[[0minfo[0m] [0mTest Starting: testEdgeLogRollsStartingAtNonZero[0m
[0m[[0minfo[0m] [0mTest Passed: testEdgeLogRollsStartingAtNonZero[0m
[0m[[0minfo[0m] [0mTest Starting: testMessageSizeCheck[0m
[0m[[0minfo[0m] [0mTest Passed: testMessageSizeCheck[0m
[0m[[0minfo[0m] [0mTest Starting: testLogRecoversToCorrectOffset[0m
[0m[[0minfo[0m] [0mTest Passed: testLogRecoversToCorrectOffset[0m
[0m[[0minfo[0m] [0mTest Starting: testTruncateTo[0m
[0m[[0minfo[0m] [0mTest Passed: testTruncateTo[0m
[0m[[0minfo[0m] [0mTest Starting: testIndexResizingAtTruncation[0m
[0m[[0minfo[0m] [0mTest Passed: testIndexResizingAtTruncation[0m
[0m[[0minfo[0m] [0mTest Starting: testAppendWithoutOffsetAssignment[0m
[0m[[0minfo[0m] [0mTest Passed: testAppendWithoutOffsetAssignment[0m
[0m[[0minfo[0m] [0mTest Starting: testReopenThenTruncate[0m
[0m[[0minfo[0m] [0mTest Passed: testReopenThenTruncate[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testFileSize[0m
[0m[[0minfo[0m] [0mTest Passed: testFileSize[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationOverPartialAndTruncation[0m
[0m[[0minfo[0m] [0mTest Starting: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Passed: testIterationDoesntChangePosition[0m
[0m[[0minfo[0m] [0mTest Starting: testRead[0m
[0m[[0minfo[0m] [0mTest Passed: testRead[0m
[0m[[0minfo[0m] [0mTest Starting: testSearch[0m
[0m[[0minfo[0m] [0mTest Passed: testSearch[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.FileMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaFetcherThread(kafka.server.ReplicaFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.ReplicaFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistentWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testEqualsWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testEqualsWithCompression[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 167, Failed 1, Errors 0, Passed 166, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /var/tmp/sbt_32c34d6e[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[31merror[0m] [0mError running kafka.admin.AdminTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 305 s, completed Nov 26, 2012 9:09:22 PM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 306 s, completed Nov 26, 2012 9:09:22 PM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Re: Build failed in Jenkins: Kafka-0.8 #113
Posted by Jay Kreps <ja...@gmail.com>.
Hey Jun,
I think AdminTest.testShutdownBroker started failing consistently after
this check-in.
-Jay
On Sun, Nov 18, 2012 at 10:09 PM, Apache Jenkins Server <
jenkins@builds.apache.org> wrote:
> See <https://builds.apache.org/job/Kafka-0.8/113/changes>
>
> Changes:
>
> [junrao] move shutting down of fetcher thread out of critical path;
> patched by Jun Rao; reviewed by Neha Narkhede; KAFKA-612
>
> ------------------------------------------
> [...truncated 2862 lines...]
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:14,085] ERROR [KafkaApi-0] error when processing request
> (test1,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1
> doesn't exist on 0
> at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [0m[ [0minfo [0m] [0mTest Passed:
> testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testMultiProduce(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testMultiProduce(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testEmptyFetchRequest(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testEmptyFetchRequest(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [0m
> [2012-11-19 06:09:18,635] ERROR [KafkaApi-0] error when processing request
> (test2,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
> at kafka.log.Log.read(Log.scala:371)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request
> (test3,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
> at kafka.log.Log.read(Log.scala:371)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request
> (test4,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
> at kafka.log.Log.read(Log.scala:371)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,637] ERROR [KafkaApi-0] error when processing request
> (test1,0,-1,10000) (kafka.server.KafkaApis:102)
> kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only
> have log segments in the range 0 to 2.
> at kafka.log.Log.read(Log.scala:371)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,639] ERROR [KafkaApi-0] error when processing request
> (test2,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test2 partition -1
> doesn't exist on 0
> at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request
> (test3,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test3 partition -1
> doesn't exist on 0
> at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request
> (test4,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test4 partition -1
> doesn't exist on 0
> at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [2012-11-19 06:09:18,641] ERROR [KafkaApi-0] error when processing request
> (test1,-1,0,10000) (kafka.server.KafkaApis:102)
> kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1
> doesn't exist on 0
> at
> kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
> at
> kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at
> scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
> at
> scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.immutable.Map$Map4.map(Map.scala:157)
> at
> kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
> at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
> at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
> at
> kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
> at java.lang.Thread.run(Thread.java:662)
> [0m[ [0minfo [0m] [0mTest Passed:
> testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)
> [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest) [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest
> == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest
> == [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m
> log4j:WARN No appenders could be found for logger
> (org.I0Itec.zkclient.ZkEventThread).
> log4j:WARN Please initialize the log4j system properly.
> [0m[ [0minfo [0m] [0mTest Passed:
> testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest
> == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest == [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testKafkaTimer(kafka.metrics.KafkaTimerTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testKafkaTimer(kafka.metrics.KafkaTimerTest) [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka /
> kafka.message.ByteBufferMessageSetTest == [0m
> [0m[ [0minfo [0m] [0mTest Starting: testWrittenEqualsRead [0m
> [0m[ [0minfo [0m] [0mTest Passed: testWrittenEqualsRead [0m
> [0m[ [0minfo [0m] [0mTest Starting: testIteratorIsConsistent [0m
> [0m[ [0minfo [0m] [0mTest Passed: testIteratorIsConsistent [0m
> [0m[ [0minfo [0m] [0mTest Starting: testSizeInBytes [0m
> [0m[ [0minfo [0m] [0mTest Passed: testSizeInBytes [0m
> [0m[ [0minfo [0m] [0mTest Starting: testEquals [0m
> [0m[ [0minfo [0m] [0mTest Passed: testEquals [0m
> [0m[ [0minfo [0m] [0mTest Starting: testWriteTo [0m
> [0m[ [0minfo [0m] [0mTest Passed: testWriteTo [0m
> [0m[ [0minfo [0m] [0mTest Starting: testValidBytes [0m
> [0m[ [0minfo [0m] [0mTest Passed: testValidBytes [0m
> [0m[ [0minfo [0m] [0mTest Starting: testValidBytesWithCompression [0m
> [0m[ [0minfo [0m] [0mTest Passed: testValidBytesWithCompression [0m
> [0m[ [0minfo [0m] [0mTest Starting: testIterator [0m
> [0m[ [0minfo [0m] [0mTest Passed: testIterator [0m
> [0m[ [0minfo [0m] [0mTest Starting: testOffsetAssignment [0m
> [0m[ [0minfo [0m] [0mTest Passed: testOffsetAssignment [0m
> [0m[ [0minfo [0m] [34m== core-kafka /
> kafka.message.ByteBufferMessageSetTest == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.producer.ProducerTest == [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testSendToNewTopic(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testSendToNewTopic(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testSendWithDeadBroker(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testSendWithDeadBroker(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest) [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.producer.ProducerTest == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.FetcherTest ==
> [0m
> [0m[ [0minfo [0m] [0mTest Starting:
> testFetcher(kafka.integration.FetcherTest) [0m
> [0m[ [0minfo [0m] [0mTest Passed:
> testFetcher(kafka.integration.FetcherTest) [0m
> [0m[ [0minfo [0m] [34m== core-kafka / kafka.integration.FetcherTest ==
> [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m
> [0m[ [0minfo [0m] [0mDeleting directory /tmp/sbt_b5aaac46 [0m
> [0m[ [0minfo [0m] [34m== core-kafka / Test cleanup 1 == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m
> [0m[ [31merror [0m] [0mFailed: : Total 167, Failed 2, Errors 0, Passed
> 165, Skipped 0 [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-finish == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m
> [0m[ [0minfo [0m] [34m== core-kafka / test-cleanup == [0m
> [0m[ [0minfo [0m] [34m [0m
> [0m[ [0minfo [0m] [34m== hadoop consumer / copy-test-resources == [0m
> [0m[ [0minfo [0m] [34m== hadoop consumer / copy-test-resources == [0m
> [0m[ [31merror [0m] [0mError running kafka.server.LogRecoveryTest: Test
> FAILED [0m
> [0m[ [31merror [0m] [0mError running kafka.admin.AdminTest: Test FAILED
> [0m
> [0m[ [31merror [0m] [0mError running test: One or more subtasks failed
> [0m
> [0m[ [0minfo [0m] [0m [0m
> [0m[ [0minfo [0m] [0mTotal time: 233 s, completed Nov 19, 2012 6:09:42
> AM [0m
> [0m[ [0minfo [0m] [0m [0m
> [0m[ [0minfo [0m] [0mTotal session time: 233 s, completed Nov 19, 2012
> 6:09:42 AM [0m
> [0m[ [31merror [0m] [0mError during build. [0m
> Build step 'Execute shell' marked build as failure
>
Build failed in Jenkins: Kafka-0.8 #113
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/113/changes>
Changes:
[junrao] move shutting down of fetcher thread out of critical path; patched by Jun Rao; reviewed by Neha Narkhede; KAFKA-612
------------------------------------------
[...truncated 2862 lines...]
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:14,085] ERROR [KafkaApi-0] error when processing request (test1,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 doesn't exist on 0
at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduce(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetchRequestCanProperlySerialize(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyFetchRequest(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetch(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testDefaultEncoderProducerAndFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[2012-11-19 06:09:18,635] ERROR [KafkaApi-0] error when processing request (test2,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
at kafka.log.Log.read(Log.scala:371)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request (test3,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
at kafka.log.Log.read(Log.scala:371)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,636] ERROR [KafkaApi-0] error when processing request (test4,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
at kafka.log.Log.read(Log.scala:371)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,637] ERROR [KafkaApi-0] error when processing request (test1,0,-1,10000) (kafka.server.KafkaApis:102)
kafka.common.OffsetOutOfRangeException: Request for offset -1 but we only have log segments in the range 0 to 2.
at kafka.log.Log.read(Log.scala:371)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:368)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,639] ERROR [KafkaApi-0] error when processing request (test2,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test2 partition -1 doesn't exist on 0
at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request (test3,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test3 partition -1 doesn't exist on 0
at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,640] ERROR [KafkaApi-0] error when processing request (test4,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test4 partition -1 doesn't exist on 0
at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[2012-11-19 06:09:18,641] ERROR [KafkaApi-0] error when processing request (test1,-1,0,10000) (kafka.server.KafkaApis:102)
kafka.common.UnknownTopicOrPartitionException: Topic test1 partition -1 doesn't exist on 0
at kafka.server.ReplicaManager.getLeaderReplicaIfLocal(ReplicaManager.scala:163)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:359)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:325)
at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:321)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.foreach(Map.scala:180)
at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
at scala.collection.immutable.Map$Map4.map(Map.scala:157)
at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:321)
at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:289)
at kafka.server.KafkaApis.handle(KafkaApis.scala:57)
at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:41)
at java.lang.Thread.run(Thread.java:662)
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetchWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceWithCompression(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerEmptyTopic(kafka.integration.PrimitiveApiTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.PrimitiveApiTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[0m[[0minfo[0m] [0mTest Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaTimer(kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testKafkaTimer(kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Passed: testWrittenEqualsRead[0m
[0m[[0minfo[0m] [0mTest Starting: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Passed: testIteratorIsConsistent[0m
[0m[[0minfo[0m] [0mTest Starting: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testSizeInBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testEquals[0m
[0m[[0minfo[0m] [0mTest Passed: testEquals[0m
[0m[[0minfo[0m] [0mTest Starting: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Passed: testWriteTo[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytes[0m
[0m[[0minfo[0m] [0mTest Starting: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Passed: testValidBytesWithCompression[0m
[0m[[0minfo[0m] [0mTest Starting: testIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testIterator[0m
[0m[[0minfo[0m] [0mTest Starting: testOffsetAssignment[0m
[0m[[0minfo[0m] [0mTest Passed: testOffsetAssignment[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.ByteBufferMessageSetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.FetcherTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFetcher(kafka.integration.FetcherTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetcher(kafka.integration.FetcherTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.FetcherTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_b5aaac46[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 167, Failed 2, Errors 0, Passed 165, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[31merror[0m] [0mError running kafka.server.LogRecoveryTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running kafka.admin.AdminTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 233 s, completed Nov 19, 2012 6:09:42 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 233 s, completed Nov 19, 2012 6:09:42 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure
Build failed in Jenkins: Kafka-0.8 #112
Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/112/changes>
Changes:
[nehanarkhede] KAFKA-532 Multiple controllers can co-exist during soft failures; patched by Neha Narkhede; reviewed by Jun Rao
------------------------------------------
[...truncated 1398 lines...]
[0m[[0minfo[0m] [0mTest Starting: testReadOnEmptySegment(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReadOnEmptySegment(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReadBeforeFirstOffset(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReadBeforeFirstOffset(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReadSingleMessage(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReadSingleMessage(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReadAfterLast(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReadAfterLast(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReadFromGap(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReadFromGap(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTruncate(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTruncate(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTruncateFull(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTruncateFull(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testNextOffsetCalculation(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNextOffsetCalculation(kafka.log.LogSegmentTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogSegmentTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.RequestPurgatoryTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testRequestSatisfaction(kafka.server.RequestPurgatoryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testRequestSatisfaction(kafka.server.RequestPurgatoryTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testRequestExpiry(kafka.server.RequestPurgatoryTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testRequestExpiry(kafka.server.RequestPurgatoryTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.RequestPurgatoryTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceSinglePartition(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testHighWatermarkPersistenceMultiplePartitions(kafka.server.HighwatermarkPersistenceTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.HighwatermarkPersistenceTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsForUnknownTopic(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeLatestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testEmptyLogsGetOffsets(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeNow(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetOffsetsBeforeEarliestTime(kafka.log.LogOffsetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogOffsetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Passed: simpleRequest[0m
[0m[[0minfo[0m] [0mTest Starting: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [0mTest Passed: tooBigRequestIsRejected[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.network.SocketServerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Passed: testWhitelists[0m
[0m[[0minfo[0m] [0mTest Starting: testBlacklists[0m
[0m[[0minfo[0m] [0mTest Passed: testBlacklists[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.TopicFilterTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.FetcherTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFetcher(kafka.integration.FetcherTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testFetcher(kafka.integration.FetcherTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.FetcherTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.utils.UtilsTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSwallow[0m
[0m[[0minfo[0m] [0mTest Passed: testSwallow[0m
[0m[[0minfo[0m] [0mTest Starting: testCircularIterator[0m
[0m[[0minfo[0m] [0mTest Passed: testCircularIterator[0m
[0m[[0minfo[0m] [0mTest Starting: testReadBytes[0m
[0m[[0minfo[0m] [0mTest Passed: testReadBytes[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.utils.UtilsTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCreateLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testGetLog(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupExpiredSegments(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testCleanupSegmentsToMaintainSize(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTimeBasedFlush(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeastLoadedAssignment(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeastLoadedAssignment(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testTwoLogManagersUsingSameDirFails(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTwoLogManagersUsingSameDirFails(kafka.log.LogManagerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.log.LogManagerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Passed: testFieldValues[0m
[0m[[0minfo[0m] [0mTest Starting: testChecksum[0m
[0m[[0minfo[0m] [0mTest Passed: testChecksum[0m
[0m[[0minfo[0m] [0mTest Starting: testEquality[0m
[0m[[0minfo[0m] [0mTest Passed: testEquality[0m
[0m[[0minfo[0m] [0mTest Starting: testIsHashable[0m
[0m[[0minfo[0m] [0mTest Passed: testIsHashable[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.TopicMetadataTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.IsrExpirationTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testIsrExpirationForStuckFollowers(kafka.server.IsrExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testIsrExpirationForStuckFollowers(kafka.server.IsrExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testIsrExpirationForSlowFollowers(kafka.server.IsrExpirationTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testIsrExpirationForSlowFollowers(kafka.server.IsrExpirationTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.IsrExpirationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.AutoOffsetResetTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testKafkaTimer(kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testKafkaTimer(kafka.metrics.KafkaTimerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.metrics.KafkaTimerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testNonReplicaSeesHwWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testReplicaSeesLeoWhenFetching(kafka.server.SimpleFetchTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.SimpleFetchTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageCompressionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSimpleCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Passed: testSimpleCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Starting: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [0mTest Passed: testComplexCompressDecompress[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.message.MessageCompressionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.api.RequestResponseSerializationTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [0mTest Passed: testSerializationAndDeserialization[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.api.RequestResponseSerializationTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testUpdateBrokerPartitionInfo(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendToNewTopic(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testSendWithDeadBroker(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.producer.ProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testLeaderElectionWithStaleControllerEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testLeaderElectionWithStaleControllerEpoch(kafka.server.LeaderElectionTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.server.LeaderElectionTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndMultiFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduce(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testProduceAndFetch(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Starting: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testMultiProduceResend(kafka.integration.LazyInitProducerTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.integration.LazyInitProducerTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ConsumerIteratorTest ==[0m
[0m[[0minfo[0m] [0mTest Starting: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)[0m
[0m[[0minfo[0m] [0mTest Passed: testConsumerIteratorDeduplicationDeepIterator(kafka.consumer.ConsumerIteratorTest)[0m
[0m[[0minfo[0m] [34m== core-kafka / kafka.consumer.ConsumerIteratorTest ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [0mDeleting directory /tmp/sbt_9ee7511c[0m
[0m[[0minfo[0m] [34m== core-kafka / Test cleanup 1 ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[31merror[0m] [0mFailed: : Total 167, Failed 1, Errors 0, Passed 166, Skipped 0[0m
[0m[[0minfo[0m] [34m== core-kafka / test-finish ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m== core-kafka / test-cleanup ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== java-examples / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [0m Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.[0m
[0m[[0minfo[0m] [0mCompiling test sources...[0m
[0m[[0minfo[0m] [0mNothing to compile.[0m
[0m[[0minfo[0m] [0m Post-analysis: 0 classes.[0m
[0m[[0minfo[0m] [34m== perf / test-compile ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop producer / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m== hadoop consumer / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m== java-examples / copy-test-resources ==[0m
[0m[[0minfo[0m] [34m[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[0minfo[0m] [34m== perf / copy-resources ==[0m
[0m[[31merror[0m] [0mError running kafka.server.LogRecoveryTest: Test FAILED[0m
[0m[[31merror[0m] [0mError running compile: javac returned nonzero exit code[0m
[0m[[31merror[0m] [0mError running compile: javac returned nonzero exit code[0m
[0m[[31merror[0m] [0mError running test: One or more subtasks failed[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal time: 291 s, completed Nov 19, 2012 12:17:59 AM[0m
[0m[[0minfo[0m] [0m[0m
[0m[[0minfo[0m] [0mTotal session time: 291 s, completed Nov 19, 2012 12:17:59 AM[0m
[0m[[31merror[0m] [0mError during build.[0m
Build step 'Execute shell' marked build as failure