You are viewing a plain text version of this content. The canonical link for it is here.
Posted to dev@kafka.apache.org by "John Fung (JIRA)" <ji...@apache.org> on 2013/07/03 23:58:20 UTC
[jira] [Resolved] (KAFKA-729) Gzip compression codec complains
about missing SnappyInputStream
[ https://issues.apache.org/jira/browse/KAFKA-729?page=com.atlassian.jira.plugin.system.issuetabpanels:all-tabpanel ]
John Fung resolved KAFKA-729.
-----------------------------
Resolution: Fixed
Not an issue anymore.
> Gzip compression codec complains about missing SnappyInputStream
> ----------------------------------------------------------------
>
> Key: KAFKA-729
> URL: https://issues.apache.org/jira/browse/KAFKA-729
> Project: Kafka
> Issue Type: Bug
> Reporter: John Fung
> Priority: Critical
>
> $ bin/kafka-run-class.sh kafka.perf.ProducerPerformance --broker-list localhost:9092 --topic test_1 --messages 10 --batch-size 1 --compression-codec 1
> java.lang.NoClassDefFoundError: org/xerial/snappy/SnappyInputStream
> at kafka.message.ByteBufferMessageSet$.kafka$message$ByteBufferMessageSet$$create(ByteBufferMessageSet.scala:41)
> at kafka.message.ByteBufferMessageSet.<init>(ByteBufferMessageSet.scala:98)
> at kafka.producer.async.DefaultEventHandler$$anonfun$4.apply(DefaultEventHandler.scala:291)
> at kafka.producer.async.DefaultEventHandler$$anonfun$4.apply(DefaultEventHandler.scala:279)
> at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.TraversableLike$$anonfun$map$1.apply(TraversableLike.scala:206)
> at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:80)
> at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:80)
> at scala.collection.Iterator$class.foreach(Iterator.scala:631)
> at scala.collection.mutable.HashTable$$anon$1.foreach(HashTable.scala:161)
> at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:194)
> at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
> at scala.collection.mutable.HashMap.foreach(HashMap.scala:80)
> at scala.collection.TraversableLike$class.map(TraversableLike.scala:206)
> at scala.collection.mutable.HashMap.map(HashMap.scala:39)
> at kafka.producer.async.DefaultEventHandler.kafka$producer$async$DefaultEventHandler$$groupMessagesToSet(DefaultEventHandler.scala:279)
> at kafka.producer.async.DefaultEventHandler$$anonfun$dispatchSerializedData$1.apply(DefaultEventHandler.scala:102)
> at kafka.producer.async.DefaultEventHandler$$anonfun$dispatchSerializedData$1.apply(DefaultEventHandler.scala:98)
> at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:80)
> at scala.collection.mutable.HashMap$$anonfun$foreach$1.apply(HashMap.scala:80)
> at scala.collection.Iterator$class.foreach(Iterator.scala:631)
> at scala.collection.mutable.HashTable$$anon$1.foreach(HashTable.scala:161)
> at scala.collection.mutable.HashTable$class.foreachEntry(HashTable.scala:194)
> at scala.collection.mutable.HashMap.foreachEntry(HashMap.scala:39)
> at scala.collection.mutable.HashMap.foreach(HashMap.scala:80)
> at kafka.producer.async.DefaultEventHandler.dispatchSerializedData(DefaultEventHandler.scala:98)
> at kafka.producer.async.DefaultEventHandler.handle(DefaultEventHandler.scala:72)
> at kafka.producer.async.ProducerSendThread.tryToHandle(ProducerSendThread.scala:104)
> at kafka.producer.async.ProducerSendThread$$anonfun$processEvents$3.apply(ProducerSendThread.scala:87)
> at kafka.producer.async.ProducerSendThread$$anonfun$processEvents$3.apply(ProducerSendThread.scala:67)
> at scala.collection.immutable.Stream.foreach(Stream.scala:254)
> at kafka.producer.async.ProducerSendThread.processEvents(ProducerSendThread.scala:66)
> at kafka.producer.async.ProducerSendThread.run(ProducerSendThread.scala:44)
--
This message is automatically generated by JIRA.
If you think it was sent incorrectly, please contact your JIRA administrators.
For more information on JIRA, see: http://www.atlassian.com/software/jira