You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by Apache Wiki <wi...@apache.org> on 2008/09/24 02:12:57 UTC

[Hadoop Wiki] Update of "WordCount" by SuzanneMatthews

Dear Wiki user,

You have subscribed to a wiki page or wiki category on "Hadoop Wiki" for change notification.

The following page has been changed by SuzanneMatthews:
http://wiki.apache.org/hadoop/WordCount

The comment on the change is:
added standard wordcount example to this page

------------------------------------------------------------------------------
  
  WordCount supports generic options: see DevelopmentCommandLineOptions
  
+ Below is the standard wordcount example implemented in Java:
+ 
+ 
+ {{{#!java
+ package org.myorg;
+  	
+ import java.io.IOException;
+ import java.util.*;
+  	
+ import org.apache.hadoop.fs.Path;
+ import org.apache.hadoop.conf.*;
+ import org.apache.hadoop.io.*;
+ import org.apache.hadoop.mapred.*;
+ import org.apache.hadoop.util.*;
+  	
+ public class WordCount {
+  	
+  public static class Map extends MapReduceBase implements Mapper<LongWritable, Text, Text, IntWritable> {
+     private final static IntWritable one = new IntWritable(1);
+     private Text word = new Text();
+  	
+     public void map(LongWritable key, Text value, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
+         String line = value.toString();
+  	StringTokenizer tokenizer = new StringTokenizer(line);
+  	while (tokenizer.hasMoreTokens()) {
+  	    word.set(tokenizer.nextToken());
+  	    output.collect(word, one);
+  	}
+     }
+  } 
+  	
+  public static class Reduce extends MapReduceBase implements Reducer<Text, IntWritable, Text, IntWritable> {
+ 
+     public void reduce(Text key, Iterator<IntWritable> values, OutputCollector<Text, IntWritable> output, Reporter reporter) throws IOException {
+         int sum = 0;
+  	while (values.hasNext()) {
+  	    sum += values.next().get();
+  	}
+  	output.collect(key, new IntWritable(sum));
+     }
+  }
+  	
+  public static void main(String[] args) throws Exception {
+     JobConf conf = new JobConf(WordCount.class);
+     conf.setJobName("wordcount");
+  	
+     conf.setOutputKeyClass(Text.class);
+     conf.setOutputValueClass(IntWritable.class);
+ 	
+     conf.setMapperClass(Map.class);
+     conf.setCombinerClass(Reduce.class);
+     conf.setReducerClass(Reduce.class);
+  	
+     conf.setInputFormat(TextInputFormat.class);
+     conf.setOutputFormat(TextOutputFormat.class);
+  	
+     FileInputFormat.setInputPaths(conf, new Path(args[0]));
+     FileOutputFormat.setOutputPath(conf, new Path(args[1]));
+  	
+     JobClient.runJob(conf);
+  }
+  	
+ }
+ }}}
+