Posted to users@kafka.apache.org by 凌德聪 <li...@gmail.com> on 2014/06/17 10:49:01 UTC

How can I build the hadoop-consumer of Kafka?

I want to build the hadoop-consumer, so I followed this:
https://github.com/kafka-dev/kafka/tree/master/contrib/hadoop-consumer
[image: inline image 1]

but I got a lot of error messages when I ran "sbt package".
[image: inline image 2]

My system info is as below:

system: CentOS release 5.10 (Final) x86_64

scala: Scala code runner version 2.10.4 -- Copyright 2002-2013, LAMP/EPFL

sbt: 0.13.5

hadoop: 2.2.0

kafka: 0.8.1



Error messages:

[info] Set current project to hadoop-consumer (in build
file:/home/hadoop/kafka/contrib/hadoop-consumer/)

[info] Compiling 12 Java sources to
/home/hadoop/kafka/contrib/hadoop-consumer/target/scala-2.10/classes...

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:27:
package kafka.api does not exist

[error] import kafka.api.FetchRequest;

[error]                 ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:28:
package kafka.api does not exist

[error] import kafka.api.FetchRequestBuilder;

[error]                 ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:29:
package kafka.api does not exist

[error] import kafka.api.PartitionOffsetRequestInfo;

[error]                 ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:30:
package kafka.common does not exist

[error] import kafka.common.TopicAndPartition;

[error]                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:31:
package kafka.javaapi does not exist

[error] import kafka.javaapi.FetchResponse;

[error]                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:32:
package kafka.javaapi does not exist

[error] import kafka.javaapi.OffsetRequest;

[error]                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:33:
package kafka.javaapi.consumer does not exist

[error] import kafka.javaapi.consumer.SimpleConsumer;

[error]                              ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:34:
package kafka.javaapi.message does not exist

[error] import kafka.javaapi.message.ByteBufferMessageSet;

[error]                             ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:35:
package kafka.message does not exist

[error] import kafka.message.MessageAndOffset;

[error]                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:36:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.BytesWritable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:37:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.JobConf;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:38:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.OutputCollector;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:39:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.Reporter;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:40:
package org.apache.hadoop.mapred.lib does not exist

[error] import org.apache.hadoop.mapred.lib.MultipleOutputs;

[error]                                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLKey.java:23:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.WritableComparable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLKey.java:25:
cannot find symbol

[error] symbol: class WritableComparable

[error] public class KafkaETLKey implements WritableComparable<KafkaETLKey>{

[error]                                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLKey.java:25:
interface expected here

[error] public class KafkaETLKey implements WritableComparable<KafkaETLKey>{

[error]                                                       ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:57:
cannot find symbol

[error] symbol  : class SimpleConsumer

[error] location: class kafka.etl.KafkaETLContext

[error]     protected SimpleConsumer _consumer = null; /*simple consumer*/

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:63:
cannot find symbol

[error] symbol  : class FetchResponse

[error] location: class kafka.etl.KafkaETLContext

[error]     protected FetchResponse _response = null;  /*fetch response*/

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:64:
cannot find symbol

[error] symbol  : class MessageAndOffset

[error] location: class kafka.etl.KafkaETLContext

[error]     protected Iterator<MessageAndOffset> _messageIt = null;
/*message iterator*/

[error]                        ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:65:
cannot find symbol

[error] symbol  : class ByteBufferMessageSet

[error] location: class kafka.etl.KafkaETLContext

[error]     protected Iterator<ByteBufferMessageSet> _respIterator = null;

[error]                        ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:72:
cannot find symbol

[error] symbol  : class Reporter

[error] location: class kafka.etl.KafkaETLContext

[error]     protected Reporter _reporter;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:74:
cannot find symbol

[error] symbol  : class MultipleOutputs

[error] location: class kafka.etl.KafkaETLContext

[error]     protected MultipleOutputs _mos;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:75:
cannot find symbol

[error] symbol  : class OutputCollector

[error] location: class kafka.etl.KafkaETLContext

[error]     protected OutputCollector<KafkaETLKey, BytesWritable>
_offsetOut = null;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:75:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLContext

[error]     protected OutputCollector<KafkaETLKey, BytesWritable>
_offsetOut = null;

[error]                                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:76:
cannot find symbol

[error] symbol  : class FetchRequestBuilder

[error] location: class kafka.etl.KafkaETLContext

[error]     protected FetchRequestBuilder builder = new
FetchRequestBuilder();

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:94:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLContext

[error]     public KafkaETLContext(JobConf job, Props props, Reporter
reporter,

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/Props.java:37:
package kafka.common does not exist

[error] import kafka.common.KafkaException;

[error]                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/Props.java:38:
package org.apache.log4j does not exist

[error] import org.apache.log4j.Logger;

[error]                        ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:94:
cannot find symbol

[error] symbol  : class Reporter

[error] location: class kafka.etl.KafkaETLContext

[error]     public KafkaETLContext(JobConf job, Props props, Reporter
reporter,

[error]                                                      ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:95:
cannot find symbol

[error] symbol  : class MultipleOutputs

[error] location: class kafka.etl.KafkaETLContext

[error]                                     MultipleOutputs mos, int index,
String input)

[error]                                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:134:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLContext

[error]     public boolean getNext(KafkaETLKey key, BytesWritable value)
throws IOException {

[error]                                             ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLContext.java:195:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLContext

[error]     protected boolean get(KafkaETLKey key, BytesWritable value)
throws IOException {

[error]                                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/Props.java:43:
cannot find symbol

[error] symbol  : class Logger

[error] location: class kafka.etl.Props

[error] private static Logger logger = Logger.getLogger(Props.class);

[error]                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:23:
package kafka.consumer does not exist

[error] import kafka.consumer.SimpleConsumer;

[error]                      ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:24:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.FileSystem;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:25:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.Path;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:26:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.BytesWritable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:27:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.InputSplit;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:28:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.JobConf;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:29:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.OutputCollector;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:30:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.RecordReader;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:31:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.Reporter;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:32:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.SequenceFileInputFormat;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:33:
package org.apache.hadoop.mapred.lib does not exist

[error] import org.apache.hadoop.mapred.lib.MultipleOutputs;

[error]                                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:38:
cannot find symbol

[error] symbol: class SequenceFileInputFormat

[error] extends SequenceFileInputFormat<KafkaETLKey, BytesWritable> {

[error]         ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:38:
cannot find symbol

[error] symbol: class BytesWritable

[error] extends SequenceFileInputFormat<KafkaETLKey, BytesWritable> {

[error]                                              ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:48:
cannot find symbol

[error] symbol  : class SimpleConsumer

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected SimpleConsumer _consumer;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:50:
cannot find symbol

[error] symbol  : class MultipleOutputs

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected MultipleOutputs _mos;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:51:
cannot find symbol

[error] symbol  : class OutputCollector

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected OutputCollector<BytesWritable, BytesWritable>
_offsetOut = null;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:51:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected OutputCollector<BytesWritable, BytesWritable>
_offsetOut = null;

[error]                               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:51:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected OutputCollector<BytesWritable, BytesWritable>
_offsetOut = null;

[error]                                              ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:63:
cannot find symbol

[error] symbol  : class InputSplit

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     public RecordReader<KafkaETLKey, BytesWritable>
getRecordReader(InputSplit split,

[error]
^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:64:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLInputFormat

[error]                                     JobConf job, Reporter reporter)

[error]                                     ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:64:
cannot find symbol

[error] symbol  : class Reporter

[error] location: class kafka.etl.KafkaETLInputFormat

[error]                                     JobConf job, Reporter reporter)

[error]                                                  ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:63:
cannot find symbol

[error] symbol  : class RecordReader

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     public RecordReader<KafkaETLKey, BytesWritable>
getRecordReader(InputSplit split,

[error]            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:63:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     public RecordReader<KafkaETLKey, BytesWritable>
getRecordReader(InputSplit split,

[error]                                      ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:70:
cannot find symbol

[error] symbol  : class FileSystem

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected boolean isSplitable(FileSystem fs, Path file) {

[error]                                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:70:
cannot find symbol

[error] symbol  : class Path

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     protected boolean isSplitable(FileSystem fs, Path file) {

[error]                                                  ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:75:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     public InputSplit[] getSplits(JobConf conf, int numSplits)
throws IOException {

[error]                                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLInputFormat.java:75:
cannot find symbol

[error] symbol  : class InputSplit

[error] location: class kafka.etl.KafkaETLInputFormat

[error]     public InputSplit[] getSplits(JobConf conf, int numSplits)
throws IOException {

[error]            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:21:
package org.apache.hadoop.filecache does not exist

[error] import org.apache.hadoop.filecache.DistributedCache;

[error]                                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:22:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.FileStatus;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:23:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.FileSystem;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:24:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.Path;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:25:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.BytesWritable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:26:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.JobConf;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:27:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.SequenceFileOutputFormat;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:28:
package org.apache.hadoop.mapred.lib does not exist

[error] import org.apache.hadoop.mapred.lib.MultipleOutputs;

[error]                                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:38:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLJob

[error]     public static JobConf createJobConf(String name, String topic,
Props props, Class classobj)

[error]                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:61:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLJob

[error]     public static JobConf getJobConf(String name, Props props,
Class classobj) throws Exception {

[error]                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLJob.java:162:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLJob

[error]     public static void setClassLoaderAndJar(JobConf conf,

[error]                                             ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:23:
package kafka.common does not exist

[error] import kafka.common.KafkaException;

[error]                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:24:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.BytesWritable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:25:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.FileSplit;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:26:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.InputSplit;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:27:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.JobConf;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:28:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.Reporter;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:29:
package org.apache.hadoop.mapred does not exist

[error] import org.apache.hadoop.mapred.SequenceFileRecordReader;

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:30:
package org.apache.hadoop.mapred.lib does not exist

[error] import org.apache.hadoop.mapred.lib.MultipleOutputs;

[error]                                    ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:34:
cannot find symbol

[error] symbol: class SequenceFileRecordReader

[error] extends SequenceFileRecordReader<KafkaETLKey, BytesWritable> {

[error]         ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:34:
cannot find symbol

[error] symbol: class BytesWritable

[error] extends SequenceFileRecordReader<KafkaETLKey, BytesWritable> {

[error]                                               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:38:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     protected JobConf _job;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:39:
cannot find symbol

[error] symbol  : class Reporter

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     protected Reporter _reporter ;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:40:
cannot find symbol

[error] symbol  : class MultipleOutputs

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     protected MultipleOutputs _mos;

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:52:
cannot find symbol

[error] symbol  : class InputSplit

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     public KafkaETLRecordReader(InputSplit split, JobConf job,
Reporter reporter)

[error]                                 ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:52:
cannot find symbol

[error] symbol  : class JobConf

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     public KafkaETLRecordReader(InputSplit split, JobConf job,
Reporter reporter)

[error]                                                   ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:52:
cannot find symbol

[error] symbol  : class Reporter

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     public KafkaETLRecordReader(InputSplit split, JobConf job,
Reporter reporter)

[error]                                                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:100:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     public BytesWritable createValue() {

[error]            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLRecordReader.java:121:
cannot find symbol

[error] symbol  : class BytesWritable

[error] location: class kafka.etl.KafkaETLRecordReader

[error]     public synchronized boolean next(KafkaETLKey key, BytesWritable
value)

[error]                                                       ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:37:
package org.apache.hadoop.conf does not exist

[error] import org.apache.hadoop.conf.Configuration;

[error]                              ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:38:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.FileStatus;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:39:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.FileSystem;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:40:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.Path;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:41:
package org.apache.hadoop.fs does not exist

[error] import org.apache.hadoop.fs.PathFilter;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:42:
package org.apache.hadoop.io does not exist

[error] import org.apache.hadoop.io.BytesWritable;

[error]                            ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:46:
cannot find symbol

[error] symbol  : class PathFilter

[error] location: class kafka.etl.KafkaETLUtils

[error] public static PathFilter PATH_FILTER = new PathFilter() {

[error]               ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:55:
cannot find symbol

[error] symbol  : class Path

[error] location: class kafka.etl.KafkaETLUtils

[error] public static Path getLastPath(Path path, FileSystem fs) throws
IOException {

[error]                                ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:55:
cannot find symbol

[error] symbol  : class FileSystem

[error] location: class kafka.etl.KafkaETLUtils

[error] public static Path getLastPath(Path path, FileSystem fs) throws
IOException {

[error]                                           ^

[error]
/home/hadoop/kafka/contrib/hadoop-consumer/src/main/java/kafka/etl/KafkaETLUtils.java:55:
cannot find symbol

[error] symbol  : class Path

[error] location: class kafka.etl.KafkaETLUtils

[error] public static Path getLastPath(Path path, FileSystem fs) throws
IOException {

[error]               ^

[error] 100 errors

[error] (compile:compile) javac returned nonzero exit code

[error] Total time: 1 s, completed Jun 17, 2014 4:07:57 PM

Re: How can I build the hadoop-consumer of Kafka?

Posted by Jun Rao <ju...@gmail.com>.
Kafka 0.8.1 uses Gradle for building jars. You can follow the README file in
the source code for instructions on how to use Gradle.
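
For reference, here is a minimal sketch of the Gradle workflow. The bootstrap
step and the "jar" task are what the 0.8.1 README describes; the per-module
task name for the hadoop-consumer is my assumption, so check the README in
your source tree for the exact commands:

    # From the root of the Kafka 0.8.1 source tree:
    gradle                                  # one-time bootstrap that downloads the Gradle wrapper
    ./gradlew jar                           # build the Kafka jars (includes the contrib modules)
    ./gradlew contrib:hadoop-consumer:jar   # assumed task name for building only the hadoop-consumer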

Thanks,

Jun

