You are viewing a plain-text version of this content; the canonical (HTML) version is available from the Apache mailing-list archive.
Posted to common-commits@hadoop.apache.org by ma...@apache.org on 2011/12/10 00:12:31 UTC
svn commit: r1212678 - in /hadoop/common/branches/branch-1: CHANGES.txt
src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
Author: mattf
Date: Fri Dec 9 23:12:31 2011
New Revision: 1212678
URL: http://svn.apache.org/viewvc?rev=1212678&view=rev
Log:
MAPREDUCE-3319. Hadoop example multifilewc broken in 0.20.205.0. Contributed by Subroto Sanyal.
Modified:
hadoop/common/branches/branch-1/CHANGES.txt
hadoop/common/branches/branch-1/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
Modified: hadoop/common/branches/branch-1/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/CHANGES.txt?rev=1212678&r1=1212677&r2=1212678&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1/CHANGES.txt Fri Dec 9 23:12:31 2011
@@ -60,6 +60,9 @@ Release 1.1.0 - unreleased
HDFS-2637. The rpc timeout for block recovery is too low. (eli)
+ MAPREDUCE-3319. Hadoop example "multifilewc" broken in 0.20.205.0.
+ (Subroto Sanyal via mattf)
+
IMPROVEMENTS
MAPREDUCE-3008. [Gridmix] Improve cumulative CPU usage emulation for
Modified: hadoop/common/branches/branch-1/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java?rev=1212678&r1=1212677&r2=1212678&view=diff
==============================================================================
--- hadoop/common/branches/branch-1/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java (original)
+++ hadoop/common/branches/branch-1/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java Fri Dec 9 23:12:31 2011
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configured
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
@@ -205,13 +205,13 @@ public class MultiFileWordCount extends
* This Mapper is similar to the one in {@link WordCount.MapClass}.
*/
public static class MapClass extends MapReduceBase
- implements Mapper<WordOffset, Text, Text, IntWritable> {
+ implements Mapper<WordOffset, Text, Text, LongWritable> {
- private final static IntWritable one = new IntWritable(1);
+ private final static LongWritable one = new LongWritable(1);
private Text word = new Text();
public void map(WordOffset key, Text value,
- OutputCollector<Text, IntWritable> output, Reporter reporter)
+ OutputCollector<Text, LongWritable> output, Reporter reporter)
throws IOException {
String line = value.toString();
@@ -244,7 +244,7 @@ public class MultiFileWordCount extends
// the keys are words (strings)
job.setOutputKeyClass(Text.class);
- // the values are counts (ints)
+ // the values are counts (longs)
- job.setOutputValueClass(IntWritable.class);
+ job.setOutputValueClass(LongWritable.class);
//use the defined mapper
job.setMapperClass(MapClass.class);