You are viewing a plain text version of this content. The canonical link for it is here.
Posted to general@hadoop.apache.org by Harshit Kumar <hk...@gmail.com> on 2009/09/07 11:48:13 UTC
Type mismatch in value from map: expected org.apache.hadoop.io.Text,
recieved org.apache.hadoop.io.IntWritable
I get this error on WordCount program, originally copied from the examples
folder in Hadoop-0.19.2.
I don't understand why I get this error. Everything is set right. Please see
the code and give feedback.
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
/**
 * Hadoop job (old {@code org.apache.hadoop.mapred} API) that, for every
 * whitespace-delimited token in the input, emits the pair
 * (token, TOKEN-IN-UPPER-CASE); the reducer then joins all values seen
 * for a key into one comma-separated string.
 *
 * Reads from directory "In" and writes to directory "Outer".
 */
public class WordCount {

    /** Mapper: emits (token, upper-cased token) for each token of a line. */
    public static class Map extends MapReduceBase implements
            Mapper<LongWritable, Text, Text, Text> {
        // Reused across map() calls to avoid a per-record allocation.
        private final Text word = new Text();
        private final Text upperWord = new Text();

        public void map(LongWritable key, Text value,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            StringTokenizer tokenizer = new StringTokenizer(value.toString());
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                word.set(token);
                // NOTE(review): default-locale toUpperCase(); if the job can
                // run under e.g. a Turkish locale, Locale.ROOT may be wanted.
                upperWord.set(token.toUpperCase());
                output.collect(word, upperWord);
            }
        }
    }

    /** Reducer: joins all values for a key into "v1, v2, ...". */
    public static class Reduce extends MapReduceBase implements
            Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterator<Text> values,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            // StringBuilder avoids the O(n^2) cost of repeated String.concat
            // and makes the "first element has no separator" logic explicit.
            StringBuilder joined = new StringBuilder();
            while (values.hasNext()) {
                if (joined.length() > 0) {
                    joined.append(", ");
                }
                joined.append(values.next().toString());
            }
            output.collect(key, new Text(joined.toString()));
        }
    }

    /** Configures and launches the job. */
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCount.class);
        conf.setJobName("wordcount");
        // Final (reduce) output key/value types.
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        // Intermediate (map) output types — must match what map() collects,
        // otherwise the framework reports a "Type mismatch in value from map".
        conf.setMapOutputKeyClass(Text.class);
        conf.setMapOutputValueClass(Text.class);
        conf.setMapperClass(Map.class);
        // Combiner intentionally disabled: this Reduce builds a comma-joined
        // string, which is not safe to apply twice (map side + reduce side).
        //conf.setCombinerClass(Reduce.class);
        conf.setReducerClass(Reduce.class);
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(conf, new Path("In"));
        FileOutputFormat.setOutputPath(conf, new Path("Outer"));
        JobClient.runJob(conf);
    }
}
Thanks
H. Kumar
Phone(Mobile): +82-10-2892-9663
Phone(Office): +82-31-
skype: harshit900
Blog: http://harshitkumar.wordpress.com
Website: http://kumarharmuscat.tripod.com
Fwd: Type mismatch in value from map: expected org.apache.hadoop.io.Text,
recieved org.apache.hadoop.io.IntWritable
Posted by Harshit Kumar <hk...@gmail.com>.
I get this error on WordCount program, originally copied from the examples
folder in Hadoop-0.19.2.
I don't understand why I get this error. Everything is set right. Please see
the code and give feedback.
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
/**
 * Hadoop job (old {@code org.apache.hadoop.mapred} API) that, for every
 * whitespace-delimited token in the input, emits the pair
 * (token, TOKEN-IN-UPPER-CASE); the reducer then joins all values seen
 * for a key into one comma-separated string.
 *
 * Reads from directory "In" and writes to directory "Outer".
 */
public class WordCount {

    /** Mapper: emits (token, upper-cased token) for each token of a line. */
    public static class Map extends MapReduceBase implements
            Mapper<LongWritable, Text, Text, Text> {
        // Reused across map() calls to avoid a per-record allocation.
        private final Text word = new Text();
        private final Text upperWord = new Text();

        public void map(LongWritable key, Text value,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            StringTokenizer tokenizer = new StringTokenizer(value.toString());
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                word.set(token);
                // NOTE(review): default-locale toUpperCase(); if the job can
                // run under e.g. a Turkish locale, Locale.ROOT may be wanted.
                upperWord.set(token.toUpperCase());
                output.collect(word, upperWord);
            }
        }
    }

    /** Reducer: joins all values for a key into "v1, v2, ...". */
    public static class Reduce extends MapReduceBase implements
            Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterator<Text> values,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            // StringBuilder avoids the O(n^2) cost of repeated String.concat
            // and makes the "first element has no separator" logic explicit.
            StringBuilder joined = new StringBuilder();
            while (values.hasNext()) {
                if (joined.length() > 0) {
                    joined.append(", ");
                }
                joined.append(values.next().toString());
            }
            output.collect(key, new Text(joined.toString()));
        }
    }

    /** Configures and launches the job. */
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCount.class);
        conf.setJobName("wordcount");
        // Final (reduce) output key/value types.
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        // Intermediate (map) output types — must match what map() collects,
        // otherwise the framework reports a "Type mismatch in value from map".
        conf.setMapOutputKeyClass(Text.class);
        conf.setMapOutputValueClass(Text.class);
        conf.setMapperClass(Map.class);
        // Combiner intentionally disabled: this Reduce builds a comma-joined
        // string, which is not safe to apply twice (map side + reduce side).
        //conf.setCombinerClass(Reduce.class);
        conf.setReducerClass(Reduce.class);
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(conf, new Path("In"));
        FileOutputFormat.setOutputPath(conf, new Path("Outer"));
        JobClient.runJob(conf);
    }
}
Thanks
H. Kumar
Phone(Mobile): +82-10-2892-9663
Phone(Office): +82-31-
skype: harshit900
Blog: http://harshitkumar.wordpress.com
Website: http://kumarharmuscat.tripod.com
Fwd: Type mismatch in value from map: expected org.apache.hadoop.io.Text,
recieved org.apache.hadoop.io.IntWritable
Posted by Harshit Kumar <hk...@gmail.com>.
I get this error on WordCount program, originally copied from the examples
folder in Hadoop-0.19.2.
I don't understand why I get this error. Everything is set right. Please see
the code and give feedback.
import java.io.IOException;
import java.util.*;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.conf.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.util.*;
/**
 * Hadoop job (old {@code org.apache.hadoop.mapred} API) that, for every
 * whitespace-delimited token in the input, emits the pair
 * (token, TOKEN-IN-UPPER-CASE); the reducer then joins all values seen
 * for a key into one comma-separated string.
 *
 * Reads from directory "In" and writes to directory "Outer".
 */
public class WordCount {

    /** Mapper: emits (token, upper-cased token) for each token of a line. */
    public static class Map extends MapReduceBase implements
            Mapper<LongWritable, Text, Text, Text> {
        // Reused across map() calls to avoid a per-record allocation.
        private final Text word = new Text();
        private final Text upperWord = new Text();

        public void map(LongWritable key, Text value,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            StringTokenizer tokenizer = new StringTokenizer(value.toString());
            while (tokenizer.hasMoreTokens()) {
                String token = tokenizer.nextToken();
                word.set(token);
                // NOTE(review): default-locale toUpperCase(); if the job can
                // run under e.g. a Turkish locale, Locale.ROOT may be wanted.
                upperWord.set(token.toUpperCase());
                output.collect(word, upperWord);
            }
        }
    }

    /** Reducer: joins all values for a key into "v1, v2, ...". */
    public static class Reduce extends MapReduceBase implements
            Reducer<Text, Text, Text, Text> {
        public void reduce(Text key, Iterator<Text> values,
                OutputCollector<Text, Text> output, Reporter reporter)
                throws IOException {
            // StringBuilder avoids the O(n^2) cost of repeated String.concat
            // and makes the "first element has no separator" logic explicit.
            StringBuilder joined = new StringBuilder();
            while (values.hasNext()) {
                if (joined.length() > 0) {
                    joined.append(", ");
                }
                joined.append(values.next().toString());
            }
            output.collect(key, new Text(joined.toString()));
        }
    }

    /** Configures and launches the job. */
    public static void main(String[] args) throws Exception {
        JobConf conf = new JobConf(WordCount.class);
        conf.setJobName("wordcount");
        // Final (reduce) output key/value types.
        conf.setOutputKeyClass(Text.class);
        conf.setOutputValueClass(Text.class);
        // Intermediate (map) output types — must match what map() collects,
        // otherwise the framework reports a "Type mismatch in value from map".
        conf.setMapOutputKeyClass(Text.class);
        conf.setMapOutputValueClass(Text.class);
        conf.setMapperClass(Map.class);
        // Combiner intentionally disabled: this Reduce builds a comma-joined
        // string, which is not safe to apply twice (map side + reduce side).
        //conf.setCombinerClass(Reduce.class);
        conf.setReducerClass(Reduce.class);
        conf.setInputFormat(TextInputFormat.class);
        conf.setOutputFormat(TextOutputFormat.class);
        FileInputFormat.setInputPaths(conf, new Path("In"));
        FileOutputFormat.setOutputPath(conf, new Path("Outer"));
        JobClient.runJob(conf);
    }
}
Thanks
H. Kumar
Phone(Mobile): +82-10-2892-9663
Phone(Office): +82-31-
skype: harshit900
Blog: http://harshitkumar.wordpress.com
Website: http://kumarharmuscat.tripod.com