You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by ma...@apache.org on 2011/12/15 10:08:39 UTC
svn commit: r1214666 - in /hadoop/common/branches/branch-1.0: CHANGES.txt
src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
Author: mattf
Date: Thu Dec 15 09:08:39 2011
New Revision: 1214666
URL: http://svn.apache.org/viewvc?rev=1214666&view=rev
Log:
MAPREDUCE-3319. Hadoop example multifilewc broken in 0.20.205.0. Contributed by Subroto Sanyal.
Modified:
hadoop/common/branches/branch-1.0/CHANGES.txt
hadoop/common/branches/branch-1.0/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
Modified: hadoop/common/branches/branch-1.0/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/CHANGES.txt?rev=1214666&r1=1214665&r2=1214666&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/CHANGES.txt (original)
+++ hadoop/common/branches/branch-1.0/CHANGES.txt Thu Dec 15 09:08:39 2011
@@ -61,6 +61,9 @@ Release 1.0.0 - 2011.11.27
BUG FIXES
+ MAPREDUCE-3319. Hadoop example "multifilewc" broken in 0.20.205.0.
+ (Subroto Sanyal via mattf)
+
HDFS-2589. Remove unnecessary hftp token fetch and renewal thread.
(Daryn Sharp via mattf)
Modified: hadoop/common/branches/branch-1.0/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1.0/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java?rev=1214666&r1=1214665&r2=1214666&view=diff
==============================================================================
--- hadoop/common/branches/branch-1.0/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java (original)
+++ hadoop/common/branches/branch-1.0/src/examples/org/apache/hadoop/examples/MultiFileWordCount.java Thu Dec 15 09:08:39 2011
@@ -30,7 +30,7 @@ import org.apache.hadoop.conf.Configured
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.IntWritable;
+import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.FileInputFormat;
@@ -205,13 +205,13 @@ public class MultiFileWordCount extends
* This Mapper is similar to the one in {@link WordCount.MapClass}.
*/
public static class MapClass extends MapReduceBase
- implements Mapper<WordOffset, Text, Text, IntWritable> {
+ implements Mapper<WordOffset, Text, Text, LongWritable> {
- private final static IntWritable one = new IntWritable(1);
+ private final static LongWritable one = new LongWritable(1);
private Text word = new Text();
public void map(WordOffset key, Text value,
- OutputCollector<Text, IntWritable> output, Reporter reporter)
+ OutputCollector<Text, LongWritable> output, Reporter reporter)
throws IOException {
String line = value.toString();
@@ -244,7 +244,7 @@ public class MultiFileWordCount extends
// the keys are words (strings)
job.setOutputKeyClass(Text.class);
// the values are counts (longs)
- job.setOutputValueClass(IntWritable.class);
+ job.setOutputValueClass(LongWritable.class);
//use the defined mapper
job.setMapperClass(MapClass.class);