You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@kafka.apache.org by Apache Jenkins Server <je...@builds.apache.org> on 2012/08/01 19:27:25 UTC

Build failed in Jenkins: Kafka-0.8 #16

See <https://builds.apache.org/job/Kafka-0.8/16/changes>

Changes:

[junrao] recommit: revisit the become leader and become follower state change operations using V3 design; patched by Yang Ye; reviewed by Neha Narkhede and Jun Rao; kafka-343

------------------------------------------
[...truncated 3415 lines...]
[2012-08-01 17:27:08,432] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,-1,1048576) (kafka.server.KafkaApis:99)
kafka.common.OffsetOutOfRangeException: offset -1 is out of range
	at kafka.log.Log$.findRange(Log.scala:46)
	at kafka.log.Log.read(Log.scala:265)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
	at java.lang.Thread.run(Thread.java:662)
[2012-08-01 17:27:08,446] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
	at sun.nio.ch.IOUtil.read(IOUtil.java:171)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
	at kafka.utils.Utils$.read(Utils.scala:630)
	at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
	at kafka.network.Processor.read(SocketServer.scala:296)
	at kafka.network.Processor.run(SocketServer.scala:212)
	at java.lang.Thread.run(Thread.java:662)
[info] Test Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[2012-08-01 17:27:09,203] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,10000,1048576) (kafka.server.KafkaApis:99)
kafka.common.OffsetOutOfRangeException: offset 10000 is out of range
	at kafka.log.Log$.findRange(Log.scala:46)
	at kafka.log.Log.read(Log.scala:265)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
	at java.lang.Thread.run(Thread.java:662)
[2012-08-01 17:27:11,197] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
	at sun.nio.ch.IOUtil.read(IOUtil.java:171)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
	at kafka.utils.Utils$.read(Utils.scala:630)
	at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
	at kafka.network.Processor.read(SocketServer.scala:296)
	at kafka.network.Processor.run(SocketServer.scala:212)
	at java.lang.Thread.run(Thread.java:662)
[info] Test Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest)
[info] Test Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[2012-08-01 17:27:12,365] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,-1,1048576) (kafka.server.KafkaApis:99)
kafka.common.OffsetOutOfRangeException: offset -1 is out of range
	at kafka.log.Log$.findRange(Log.scala:46)
	at kafka.log.Log.read(Log.scala:265)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
	at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
	at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
	at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
	at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
	at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
	at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
	at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
	at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
	at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
	at java.lang.Thread.run(Thread.java:662)
[2012-08-01 17:27:13,044] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a0beb0012, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:13,246] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a0beb0016, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:14,333] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.read0(Native Method)
	at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
	at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
	at sun.nio.ch.IOUtil.read(IOUtil.java:171)
	at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
	at kafka.utils.Utils$.read(Utils.scala:630)
	at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
	at kafka.network.Processor.read(SocketServer.scala:296)
	at kafka.network.Processor.run(SocketServer.scala:212)
	at java.lang.Thread.run(Thread.java:662)
[2012-08-01 17:27:14,347] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0004, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[info] Test Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest)
[info] == core-kafka / kafka.integration.AutoOffsetResetTest ==
[info] 
[info] == core-kafka / kafka.integration.TopicMetadataTest ==
[info] Test Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)
[info] Test Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest)
[info] Test Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)
[info] Test Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest)
[info] Test Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest)
[info] Test Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest)
[info] == core-kafka / kafka.integration.TopicMetadataTest ==
[info] 
[info] == core-kafka / kafka.server.LeaderElectionTest ==
[info] Test Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)
[2012-08-01 17:27:15,189] ERROR Kafka Log on Broker 1, Cannot truncate log to 0 since the log start offset is 0 and end offset is 0 (kafka.log.Log:93)
[2012-08-01 17:27:15,694] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
java.io.IOException: Connection reset by peer
	at sun.nio.ch.FileDispatcher.write0(Native Method)
	at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
	at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:69)
	at sun.nio.ch.IOUtil.write(IOUtil.java:40)
	at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
	at kafka.api.PartitionDataSend.writeTo(FetchResponse.scala:66)
	at kafka.network.MultiSend.writeTo(Transmission.scala:94)
	at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
	at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
	at kafka.api.TopicDataSend.writeTo(FetchResponse.scala:142)
	at kafka.network.MultiSend.writeTo(Transmission.scala:94)
	at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
	at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
	at kafka.api.FetchResponseSend.writeTo(FetchResponse.scala:219)
	at kafka.network.Processor.write(SocketServer.scala:321)
	at kafka.network.Processor.run(SocketServer.scala:214)
	at java.lang.Thread.run(Thread.java:662)
[2012-08-01 17:27:15,834] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0007, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:15,835] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0012, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:17,261] ERROR Kafka Log on Broker 1, Cannot truncate log to 0 since the log start offset is 0 and end offset is 0 (kafka.log.Log:93)
[2012-08-01 17:27:19,635] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0014, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:19,636] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0015, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:19,645] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0006, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:19,665] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0018, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[2012-08-01 17:27:19,728] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
java.nio.channels.CancelledKeyException
	at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
	at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
	at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
	at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
	at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
	at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
[2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
java.nio.channels.CancelledKeyException
	at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
	at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
	at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
	at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
	at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
	at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
[2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
java.nio.channels.CancelledKeyException
	at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
	at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
	at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
	at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
	at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
	at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
[2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
java.nio.channels.CancelledKeyException
	at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
	at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
	at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
	at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
	at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
	at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
[info] Test Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest)
[info] == core-kafka / kafka.server.LeaderElectionTest ==
[info] 
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] Test Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[info] Test Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Starting: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Passed: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] 
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] Test Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] Test Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest)
[info] == core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /tmp/sbt_501f0f08
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 136, Failed 3, Errors 0, Passed 133, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[info] 
[info] == java-examples / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == java-examples / test-compile ==
[info] 
[info] == hadoop consumer / copy-test-resources ==
[info] == hadoop consumer / copy-test-resources ==
[info] 
[info] == hadoop consumer / copy-resources ==
[info] == hadoop consumer / copy-resources ==
[info] 
[info] == perf / copy-resources ==
[info] == perf / copy-resources ==
[info] 
[info] == java-examples / copy-test-resources ==
[info] == java-examples / copy-test-resources ==
[info] 
[info] == perf / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == perf / test-compile ==
[info] 
[info] == hadoop consumer / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == hadoop consumer / test-compile ==
[info] 
[info] == perf / copy-test-resources ==
[info] == perf / copy-test-resources ==
[info] 
[info] == hadoop producer / copy-resources ==
[info] == hadoop producer / copy-resources ==
[info] 
[info] == java-examples / copy-resources ==
[info] == java-examples / copy-resources ==
[error] Error running kafka.producer.SyncProducerTest: Test FAILED
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running kafka.server.ServerShutdownTest: Test FAILED
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 229 s, completed Aug 1, 2012 5:27:24 PM
[info] 
[info] Total session time: 229 s, completed Aug 1, 2012 5:27:24 PM
[error] Error during build.
Build step 'Execute shell' marked build as failure

Jenkins build is back to normal : Kafka-0.8 #18

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/18/changes>


Build failed in Jenkins: Kafka-0.8 #17

Posted by Apache Jenkins Server <je...@builds.apache.org>.
See <https://builds.apache.org/job/Kafka-0.8/17/changes>

Changes:

[nehanarkhede] KAFKA-380 Enhance single_host_multi_brokers test with failure to trigger leader re-election in replication; patched by John Fung; reviewed by Neha Narkhede, Jun Rao and Joel Koshy

------------------------------------------
[...truncated 1987 lines...]
	at junit.framework.TestCase.runBare(TestCase.java:130)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:120)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[2012-08-06 18:09:51,684] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138fd1f554f0008, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[info] Test Passed: testInvalidPartition(kafka.producer.AsyncProducerTest)
[info] Test Starting: testNoBroker(kafka.producer.AsyncProducerTest)
[2012-08-06 18:09:51,702] ERROR Failed to collate messages by topic, partition due to (kafka.producer.async.DefaultEventHandler:99)
kafka.common.NoBrokersForPartitionException: Partition = null
	at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$getPartitionListForTopic(DefaultEventHandler.scala:145)
	at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:102)
	at kafka.producer.async.DefaultEventHandler$$anonfun$partitionAndCollate$1.apply(DefaultEventHandler.scala:101)
	at scala.collection.LinearSeqOptimized$class.foreach(LinearSeqOptimized.scala:61)
	at scala.collection.immutable.List.foreach(List.scala:45)
	at scala.collection.generic.TraversableForwarder$class.foreach(TraversableForwarder.scala:44)
	at scala.collection.mutable.ListBuffer.foreach(ListBuffer.scala:42)
	at kafka.producer.async.DefaultEventHandler.partitionAndCollate(DefaultEventHandler.scala:101)
	at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:66)
	at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:49)
	at kafka.producer.AsyncProducerTest.testNoBroker(AsyncProducerTest.scala:296)
	at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
	at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:39)
	at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:25)
	at java.lang.reflect.Method.invoke(Method.java:597)
	at junit.framework.TestCase.runTest(TestCase.java:164)
	at junit.framework.TestCase.runBare(TestCase.java:130)
	at junit.framework.TestResult$1.protect(TestResult.java:110)
	at junit.framework.TestResult.runProtected(TestResult.java:128)
	at junit.framework.TestResult.run(TestResult.java:113)
	at junit.framework.TestCase.run(TestCase.java:120)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at junit.framework.TestSuite.runTest(TestSuite.java:228)
	at junit.framework.TestSuite.run(TestSuite.java:223)
	at org.scalatest.junit.JUnit3Suite.run(JUnit3Suite.scala:309)
	at org.scalatest.tools.ScalaTestFramework$ScalaTestRunner.run(ScalaTestFramework.scala:40)
	at sbt.TestRunner.run(TestFramework.scala:53)
	at sbt.TestRunner.runTest$1(TestFramework.scala:67)
	at sbt.TestRunner.run(TestFramework.scala:76)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11.runTest$2(TestFramework.scala:194)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.TestFramework$$anonfun$10$$anonfun$apply$11$$anonfun$apply$12.apply(TestFramework.scala:205)
	at sbt.NamedTestTask.run(TestFramework.scala:92)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.ScalaProject$$anonfun$sbt$ScalaProject$$toTask$1.apply(ScalaProject.scala:193)
	at sbt.TaskManager$Task.invoke(TaskManager.scala:62)
	at sbt.impl.RunTask.doRun$1(RunTask.scala:77)
	at sbt.impl.RunTask.runTask(RunTask.scala:85)
	at sbt.impl.RunTask.sbt$impl$RunTask$$runIfNotRoot(RunTask.scala:60)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.impl.RunTask$$anonfun$runTasksExceptRoot$2.apply(RunTask.scala:48)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Distributor$Run$Worker$$anonfun$2.apply(ParallelRunner.scala:131)
	at sbt.Control$.trapUnit(Control.scala:19)
	at sbt.Distributor$Run$Worker.run(ParallelRunner.scala:131)
[info] Test Passed: testNoBroker(kafka.producer.AsyncProducerTest)
[info] Test Starting: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)
[info] Test Passed: testIncompatibleEncoder(kafka.producer.AsyncProducerTest)
[info] Test Starting: testRandomPartitioner(kafka.producer.AsyncProducerTest)
[info] Test Passed: testRandomPartitioner(kafka.producer.AsyncProducerTest)
[info] Test Starting: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)
[info] Test Passed: testBrokerListAndAsync(kafka.producer.AsyncProducerTest)
[info] Test Starting: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)
[info] Test Passed: testFailedSendRetryLogic(kafka.producer.AsyncProducerTest)
[info] Test Starting: testJavaProducer(kafka.producer.AsyncProducerTest)
[2012-08-06 18:09:51,994] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138fd2090ce0011, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
[info] Test Passed: testJavaProducer(kafka.producer.AsyncProducerTest)
[info] Test Starting: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] Test Passed: testInvalidConfiguration(kafka.producer.AsyncProducerTest)
[info] == core-kafka / kafka.producer.AsyncProducerTest ==
[info] 
[info] == core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==
[info] Test Starting: testWrittenEqualsRead
[info] Test Passed: testWrittenEqualsRead
[info] Test Starting: testIteratorIsConsistent
[info] Test Passed: testIteratorIsConsistent
[info] Test Starting: testSizeInBytes
[info] Test Passed: testSizeInBytes
[info] Test Starting: testValidBytes
[info] Test Passed: testValidBytes
[info] Test Starting: testEquals
[info] Test Passed: testEquals
[info] Test Starting: testIteratorIsConsistentWithCompression
[info] Test Passed: testIteratorIsConsistentWithCompression
[info] Test Starting: testSizeInBytesWithCompression
[info] Test Passed: testSizeInBytesWithCompression
[info] Test Starting: testValidBytesWithCompression
[info] Test Passed: testValidBytesWithCompression
[info] Test Starting: testEqualsWithCompression
[info] Test Passed: testEqualsWithCompression
[info] == core-kafka / kafka.javaapi.message.ByteBufferMessageSetTest ==
[info] 
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] Test Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
log4j:WARN Please initialize the log4j system properly.
[info] Test Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Starting: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] Test Passed: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest)
[info] == core-kafka / kafka.log4j.KafkaLog4jAppenderTest ==
[info] 
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] Test Starting: testZKSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Passed: testZKSendToNewTopic(kafka.producer.ProducerTest)
[info] Test Starting: testZKSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Passed: testZKSendWithDeadBroker(kafka.producer.ProducerTest)
[info] Test Starting: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] Test Passed: testAsyncSendCanCorrectlyFailWithTimeout(kafka.producer.ProducerTest)
[info] == core-kafka / kafka.producer.ProducerTest ==
[info] 
[info] == core-kafka / kafka.admin.AdminTest ==
[info] Test Starting: testReplicaAssignment(kafka.admin.AdminTest)
[info] Test Passed: testReplicaAssignment(kafka.admin.AdminTest)
[info] Test Starting: testManualReplicaAssignment(kafka.admin.AdminTest)
[info] Test Passed: testManualReplicaAssignment(kafka.admin.AdminTest)
[info] Test Starting: testTopicCreationInZK(kafka.admin.AdminTest)
[info] Test Passed: testTopicCreationInZK(kafka.admin.AdminTest)
[info] Test Starting: testGetTopicMetadata(kafka.admin.AdminTest)
[info] Test Passed: testGetTopicMetadata(kafka.admin.AdminTest)
[info] == core-kafka / kafka.admin.AdminTest ==
[info] 
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] Test Starting: simpleRequest
[info] Test Passed: simpleRequest
[info] Test Starting: tooBigRequestIsRejected
[info] Test Passed: tooBigRequestIsRejected
[info] == core-kafka / kafka.network.SocketServerTest ==
[info] 
[info] == core-kafka / kafka.message.MessageTest ==
[info] Test Starting: testFieldValues
[info] Test Passed: testFieldValues
[info] Test Starting: testChecksum
[info] Test Passed: testChecksum
[info] Test Starting: testEquality
[info] Test Passed: testEquality
[info] Test Starting: testIsHashable
[info] Test Passed: testIsHashable
[info] == core-kafka / kafka.message.MessageTest ==
[info] 
[info] == core-kafka / kafka.producer.SyncProducerTest ==
[info] Test Starting: testReachableServer(kafka.producer.SyncProducerTest)
[info] Test Passed: testReachableServer(kafka.producer.SyncProducerTest)
[info] Test Starting: testEmptyProduceRequest(kafka.producer.SyncProducerTest)
[info] Test Passed: testEmptyProduceRequest(kafka.producer.SyncProducerTest)
[info] Test Starting: testSingleMessageSizeTooLarge(kafka.producer.SyncProducerTest)
[info] Test Passed: testSingleMessageSizeTooLarge(kafka.producer.SyncProducerTest)
[info] Test Starting: testCompressedMessageSizeTooLarge(kafka.producer.SyncProducerTest)
[info] Test Passed: testCompressedMessageSizeTooLarge(kafka.producer.SyncProducerTest)
[info] Test Starting: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)
[info] Test Passed: testProduceCorrectlyReceivesResponse(kafka.producer.SyncProducerTest)
[info] Test Starting: testProducerCanTimeout(kafka.producer.SyncProducerTest)
[info] Test Passed: testProducerCanTimeout(kafka.producer.SyncProducerTest)
[info] Test Starting: testProduceRequestForUnknownTopic(kafka.producer.SyncProducerTest)
[info] Test Passed: testProduceRequestForUnknownTopic(kafka.producer.SyncProducerTest)
[info] == core-kafka / kafka.producer.SyncProducerTest ==
[info] 
[info] == core-kafka / kafka.controller.ControllerBasicTest ==
[info] Test Starting: testControllerFailOver(kafka.controller.ControllerBasicTest)
[info] Test Passed: testControllerFailOver(kafka.controller.ControllerBasicTest)
[info] Test Starting: testControllerCommandSend(kafka.controller.ControllerBasicTest)
[info] Test Passed: testControllerCommandSend(kafka.controller.ControllerBasicTest)
[info] == core-kafka / kafka.controller.ControllerBasicTest ==
[info] 
[info] == core-kafka / test-finish ==
[error] Failed: : Total 136, Failed 1, Errors 0, Passed 135, Skipped 0
[info] == core-kafka / test-finish ==
[info] 
[info] == core-kafka / Test cleanup 1 ==
[info] Deleting directory /var/tmp/sbt_efc390bd
[info] == core-kafka / Test cleanup 1 ==
[info] 
[info] == core-kafka / test-cleanup ==
[info] == core-kafka / test-cleanup ==
[info] 
[info] == hadoop producer / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == hadoop producer / test-compile ==
[info] 
[info] == java-examples / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == java-examples / test-compile ==
[info] 
[info] == hadoop producer / copy-resources ==
[info] == hadoop producer / copy-resources ==
[info] 
[info] == hadoop producer / copy-test-resources ==
[info] == hadoop producer / copy-test-resources ==
[info] 
[info] == hadoop consumer / copy-test-resources ==
[info] == hadoop consumer / copy-test-resources ==
[info] 
[info] == perf / copy-resources ==
[info] == perf / copy-resources ==
[info] 
[info] == java-examples / copy-test-resources ==
[info] == java-examples / copy-test-resources ==
[info] 
[info] == java-examples / copy-resources ==
[info] == java-examples / copy-resources ==
[info] 
[info] == hadoop consumer / test-compile ==
[info]   Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed.
[info] Compiling test sources...
[info] Nothing to compile.
[info]   Post-analysis: 0 classes.
[info] == hadoop consumer / test-compile ==
[info] 
[info] == hadoop consumer / copy-resources ==
[info] == hadoop consumer / copy-resources ==
[error] Error running kafka.message.CompressionUtilTest: Test FAILED
[error] Error running test: One or more subtasks failed
[info] 
[info] Total time: 495 s, completed Aug 6, 2012 6:10:16 PM
[info] 
[info] Total session time: 496 s, completed Aug 6, 2012 6:10:16 PM
[error] Error during build.
Build step 'Execute shell' marked build as failure

Re: Build failed in Jenkins: Kafka-0.8 #16

Posted by Neha Narkhede <ne...@gmail.com>.
Victor,

Three unit tests broke after the KAFKA-343 check-in. Can you please
file the relevant JIRAs?

[error] Error running kafka.producer.SyncProducerTest: Test FAILED
[error] Error running kafka.server.LogRecoveryTest: Test FAILED
[error] Error running kafka.server.ServerShutdownTest: Test FAILED

Thanks,
Neha

On Wed, Aug 1, 2012 at 10:27 AM, Apache Jenkins Server
<je...@builds.apache.org> wrote:
> See <https://builds.apache.org/job/Kafka-0.8/16/changes>
>
> Changes:
>
> [junrao] recommit: revisit the become leader and become follower state change operations using V3 design; patched by Yang Ye; reviewed by Neha Narkhede and Jun Rao; kafka-343
>
> ------------------------------------------
> [...truncated 3415 lines...]
> [2012-08-01 17:27:08,432] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,-1,1048576) (kafka.server.KafkaApis:99)
> kafka.common.OffsetOutOfRangeException: offset -1 is out of range
>         at kafka.log.Log$.findRange(Log.scala:46)
>         at kafka.log.Log.read(Log.scala:265)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
>         at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
>         at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
>         at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-08-01 17:27:08,446] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
> java.io.IOException: Connection reset by peer
>         at sun.nio.ch.FileDispatcher.read0(Native Method)
>         at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
>         at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
>         at sun.nio.ch.IOUtil.read(IOUtil.java:171)
>         at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
>         at kafka.utils.Utils$.read(Utils.scala:630)
>         at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
>         at kafka.network.Processor.read(SocketServer.scala:296)
>         at kafka.network.Processor.run(SocketServer.scala:212)
>         at java.lang.Thread.run(Thread.java:662)
>  [0m[ [0minfo [0m]  [0mTest Passed: testResetToEarliestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest) [0m
> [2012-08-01 17:27:09,203] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,10000,1048576) (kafka.server.KafkaApis:99)
> kafka.common.OffsetOutOfRangeException: offset 10000 is out of range
>         at kafka.log.Log$.findRange(Log.scala:46)
>         at kafka.log.Log.read(Log.scala:265)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
>         at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
>         at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
>         at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-08-01 17:27:11,197] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
> java.io.IOException: Connection reset by peer
>         at sun.nio.ch.FileDispatcher.read0(Native Method)
>         at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
>         at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
>         at sun.nio.ch.IOUtil.read(IOUtil.java:171)
>         at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
>         at kafka.utils.Utils$.read(Utils.scala:630)
>         at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
>         at kafka.network.Processor.read(SocketServer.scala:296)
>         at kafka.network.Processor.run(SocketServer.scala:212)
>         at java.lang.Thread.run(Thread.java:662)
>  [0m[ [0minfo [0m]  [0mTest Passed: testResetToLatestWhenOffsetTooHigh(kafka.integration.AutoOffsetResetTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest) [0m
> [2012-08-01 17:27:12,365] ERROR KafkaApi on Broker 0, error when processing request (test_topic,0,-1,1048576) (kafka.server.KafkaApis:99)
> kafka.common.OffsetOutOfRangeException: offset -1 is out of range
>         at kafka.log.Log$.findRange(Log.scala:46)
>         at kafka.log.Log.read(Log.scala:265)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSet(KafkaApis.scala:377)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:333)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1$$anonfun$apply$21.apply(KafkaApis.scala:332)
>         at scala.collection.mutable.ResizableArray$class.foreach(ResizableArray.scala:57)
>         at scala.collection.mutable.ArrayBuffer.foreach(ArrayBuffer.scala:43)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:332)
>         at kafka.server.KafkaApis$$anonfun$kafka$server$KafkaApis$$readMessageSets$1.apply(KafkaApis.scala:328)
>         at scala.collection.IndexedSeqOptimized$class.foreach(IndexedSeqOptimized.scala:34)
>         at scala.collection.mutable.WrappedArray.foreach(WrappedArray.scala:32)
>         at kafka.server.KafkaApis.kafka$server$KafkaApis$$readMessageSets(KafkaApis.scala:328)
>         at kafka.server.KafkaApis.handleFetchRequest(KafkaApis.scala:272)
>         at kafka.server.KafkaApis.handle(KafkaApis.scala:59)
>         at kafka.server.KafkaRequestHandler.run(KafkaRequestHandler.scala:38)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-08-01 17:27:13,044] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a0beb0012, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:13,246] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a0beb0016, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:14,333] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
> java.io.IOException: Connection reset by peer
>         at sun.nio.ch.FileDispatcher.read0(Native Method)
>         at sun.nio.ch.SocketDispatcher.read(SocketDispatcher.java:21)
>         at sun.nio.ch.IOUtil.readIntoNativeBuffer(IOUtil.java:198)
>         at sun.nio.ch.IOUtil.read(IOUtil.java:171)
>         at sun.nio.ch.SocketChannelImpl.read(SocketChannelImpl.java:243)
>         at kafka.utils.Utils$.read(Utils.scala:630)
>         at kafka.network.BoundedByteBufferReceive.readFrom(BoundedByteBufferReceive.scala:54)
>         at kafka.network.Processor.read(SocketServer.scala:296)
>         at kafka.network.Processor.run(SocketServer.scala:212)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-08-01 17:27:14,347] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0004, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
>  [0m[ [0minfo [0m]  [0mTest Passed: testResetToLatestWhenOffsetTooLow(kafka.integration.AutoOffsetResetTest) [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.integration.AutoOffsetResetTest == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.integration.TopicMetadataTest == [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testTopicMetadataRequest(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Passed: testTopicMetadataRequest(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testBasicTopicMetadata(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Passed: testBasicTopicMetadata(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testAutoCreateTopic(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Passed: testAutoCreateTopic(kafka.integration.TopicMetadataTest) [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.integration.TopicMetadataTest == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.server.LeaderElectionTest == [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest) [0m
> [2012-08-01 17:27:15,189] ERROR Kafka Log on Broker 1, Cannot truncate log to 0 since the log start offset is 0 and end offset is 0 (kafka.log.Log:93)
> [2012-08-01 17:27:15,694] ERROR Closing socket for /67.195.138.9 because of error (kafka.network.Processor:99)
> java.io.IOException: Connection reset by peer
>         at sun.nio.ch.FileDispatcher.write0(Native Method)
>         at sun.nio.ch.SocketDispatcher.write(SocketDispatcher.java:29)
>         at sun.nio.ch.IOUtil.writeFromNativeBuffer(IOUtil.java:69)
>         at sun.nio.ch.IOUtil.write(IOUtil.java:40)
>         at sun.nio.ch.SocketChannelImpl.write(SocketChannelImpl.java:334)
>         at kafka.api.PartitionDataSend.writeTo(FetchResponse.scala:66)
>         at kafka.network.MultiSend.writeTo(Transmission.scala:94)
>         at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
>         at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
>         at kafka.api.TopicDataSend.writeTo(FetchResponse.scala:142)
>         at kafka.network.MultiSend.writeTo(Transmission.scala:94)
>         at kafka.network.Send$class.writeCompletely(Transmission.scala:75)
>         at kafka.network.MultiSend.writeCompletely(Transmission.scala:87)
>         at kafka.api.FetchResponseSend.writeTo(FetchResponse.scala:219)
>         at kafka.network.Processor.write(SocketServer.scala:321)
>         at kafka.network.Processor.run(SocketServer.scala:214)
>         at java.lang.Thread.run(Thread.java:662)
> [2012-08-01 17:27:15,834] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0007, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:15,835] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a167e0012, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:17,261] ERROR Kafka Log on Broker 1, Cannot truncate log to 0 since the log start offset is 0 and end offset is 0 (kafka.log.Log:93)
> [2012-08-01 17:27:19,635] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0014, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:19,636] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0015, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:19,645] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0006, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:19,665] WARN EndOfStreamException: Unable to read additional data from client sessionid 0x138e33a252b0018, likely client has closed socket (org.apache.zookeeper.server.NIOServerCnxn:634)
> [2012-08-01 17:27:19,728] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
> java.nio.channels.CancelledKeyException
>         at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
>         at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
>         at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
>         at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
> [2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
> java.nio.channels.CancelledKeyException
>         at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
>         at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
>         at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
>         at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
> [2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
> java.nio.channels.CancelledKeyException
>         at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
>         at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
>         at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
>         at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
> [2012-08-01 17:27:19,729] ERROR Unexpected Exception:  (org.apache.zookeeper.server.NIOServerCnxn:445)
> java.nio.channels.CancelledKeyException
>         at sun.nio.ch.SelectionKeyImpl.ensureValid(SelectionKeyImpl.java:55)
>         at sun.nio.ch.SelectionKeyImpl.interestOps(SelectionKeyImpl.java:59)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendBuffer(NIOServerCnxn.java:418)
>         at org.apache.zookeeper.server.NIOServerCnxn.sendResponse(NIOServerCnxn.java:1509)
>         at org.apache.zookeeper.server.FinalRequestProcessor.processRequest(FinalRequestProcessor.java:171)
>         at org.apache.zookeeper.server.SyncRequestProcessor.run(SyncRequestProcessor.java:135)
>  [0m[ [0minfo [0m]  [0mTest Passed: testLeaderElectionAndEpoch(kafka.server.LeaderElectionTest) [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.server.LeaderElectionTest == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest == [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m
> log4j:WARN No appenders could be found for logger (org.I0Itec.zkclient.ZkEventThread).
> log4j:WARN Please initialize the log4j system properly.
>  [0m[ [0minfo [0m]  [0mTest Passed: testKafkaLog4jConfigs(kafka.log4j.KafkaLog4jAppenderTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Passed: testZkConnectLog4jAppends(kafka.log4j.KafkaLog4jAppenderTest) [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.log4j.KafkaLog4jAppenderTest == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest == [0m
>  [0m[ [0minfo [0m]  [0mTest Starting: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest) [0m
>  [0m[ [0minfo [0m]  [0mTest Passed: testBasic(kafka.javaapi.consumer.ZookeeperConsumerConnectorTest) [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / kafka.javaapi.consumer.ZookeeperConsumerConnectorTest == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / Test cleanup 1 == [0m
>  [0m[ [0minfo [0m]  [0mDeleting directory /tmp/sbt_501f0f08 [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / Test cleanup 1 == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / test-finish == [0m
>  [0m[ [31merror [0m]  [0mFailed: : Total 136, Failed 3, Errors 0, Passed 133, Skipped 0 [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / test-finish == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / test-cleanup == [0m
>  [0m[ [0minfo [0m]  [34m== core-kafka / test-cleanup == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / test-compile == [0m
>  [0m[ [0minfo [0m]  [0m  Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed. [0m
>  [0m[ [0minfo [0m]  [0mCompiling test sources... [0m
>  [0m[ [0minfo [0m]  [0mNothing to compile. [0m
>  [0m[ [0minfo [0m]  [0m  Post-analysis: 0 classes. [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / test-compile == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== perf / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m== perf / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== perf / test-compile == [0m
>  [0m[ [0minfo [0m]  [0m  Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed. [0m
>  [0m[ [0minfo [0m]  [0mCompiling test sources... [0m
>  [0m[ [0minfo [0m]  [0mNothing to compile. [0m
>  [0m[ [0minfo [0m]  [0m  Post-analysis: 0 classes. [0m
>  [0m[ [0minfo [0m]  [34m== perf / test-compile == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / test-compile == [0m
>  [0m[ [0minfo [0m]  [0m  Source analysis: 0 new/modified, 0 indirectly invalidated, 0 removed. [0m
>  [0m[ [0minfo [0m]  [0mCompiling test sources... [0m
>  [0m[ [0minfo [0m]  [0mNothing to compile. [0m
>  [0m[ [0minfo [0m]  [0m  Post-analysis: 0 classes. [0m
>  [0m[ [0minfo [0m]  [34m== hadoop consumer / test-compile == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== perf / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m== perf / copy-test-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== hadoop producer / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m== hadoop producer / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / copy-resources == [0m
>  [0m[ [0minfo [0m]  [34m== java-examples / copy-resources == [0m
>  [0m[ [31merror [0m]  [0mError running kafka.producer.SyncProducerTest: Test FAILED [0m
>  [0m[ [31merror [0m]  [0mError running kafka.server.LogRecoveryTest: Test FAILED [0m
>  [0m[ [31merror [0m]  [0mError running kafka.server.ServerShutdownTest: Test FAILED [0m
>  [0m[ [31merror [0m]  [0mError running test: One or more subtasks failed [0m
>  [0m[ [0minfo [0m]  [0m [0m
>  [0m[ [0minfo [0m]  [0mTotal time: 229 s, completed Aug 1, 2012 5:27:24 PM [0m
>  [0m[ [0minfo [0m]  [0m [0m
>  [0m[ [0minfo [0m]  [0mTotal session time: 229 s, completed Aug 1, 2012 5:27:24 PM [0m
>  [0m[ [31merror [0m]  [0mError during build. [0m
> Build step 'Execute shell' marked build as failure