Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/03/31 00:17:28 UTC

svn commit: r390258 - in /lucene/hadoop/trunk/src: examples/org/apache/hadoop/examples/ java/org/apache/hadoop/mapred/lib/

Author: cutting
Date: Thu Mar 30 14:17:26 2006
New Revision: 390258

URL: http://svn.apache.org/viewcvs?rev=390258&view=rev
Log:
Fix for HADOOP-103.  Add a base class for Mapper and Reducer implementations that implements Closeable and JobConfigurable.  Use it in supplied Mappers & Reducers.  Also some minor improvements to demos.  Contributed by Owen O'Malley.
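
For context, here is a minimal sketch of what the new MapReduceBase class presumably looks like, inferred from the log message above and from the no-op configure()/close() methods removed in the diffs below. The class itself is not part of this changeset, so the package of the Closeable interface and the exact method signatures are assumptions, not the committed source:

    package org.apache.hadoop.mapred;

    import java.io.IOException;
    // Assumption: the Closeable referenced in the log is Hadoop's own
    // interface; it may instead be java.io.Closeable.
    import org.apache.hadoop.io.Closeable;

    /** Base class for Mapper and Reducer implementations, providing
     * default no-op implementations of configure() and close() so that
     * subclasses only override the methods they actually need. */
    public class MapReduceBase implements Closeable, JobConfigurable {

      /** Default implementation that does nothing. */
      public void configure(JobConf job) {
      }

      /** Default implementation that does nothing. */
      public void close() throws IOException {
      }

    }

With such a base class in place, each library Mapper and Reducer below simply changes from "implements Mapper" to "extends MapReduceBase implements Mapper" (and likewise for Reducer) and drops its empty configure() and close() bodies, as the diffs show.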

Modified:
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java Thu Mar 30 14:17:26 2006
@@ -30,6 +30,7 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 /**
  * This is an example Hadoop Map/Reduce application.
@@ -49,9 +50,10 @@
    * For each line of input, break the line into words and emit them as
    * (<b>word</b>, <b>1</b>).
    */
-  public static class MapClass implements Mapper {
+  public static class MapClass extends MapReduceBase implements Mapper {
     
     private final static IntWritable one = new IntWritable(1);
+    private UTF8 word = new UTF8();
     
     public void map(WritableComparable key, Writable value, 
         OutputCollector output, 
@@ -59,23 +61,16 @@
       String line = ((UTF8)value).toString();
       StringTokenizer itr = new StringTokenizer(line);
       while (itr.hasMoreTokens()) {
-        String word = itr.nextToken();
-        output.collect(new UTF8(word), one);
+        word.set(itr.nextToken());
+        output.collect(word, one);
       }
     }
-    
-    public void configure(JobConf job) {
-    }
-    
-    public void close() {
-    }
-
   }
   
   /**
    * A reducer class that just emits the sum of the input values.
    */
-  public static class Reduce implements Reducer {
+  public static class Reduce extends MapReduceBase implements Reducer {
     
     public void reduce(WritableComparable key, Iterator values,
         OutputCollector output, 
@@ -86,13 +81,6 @@
       }
       output.collect(key, new IntWritable(sum));
     }
-    
-    public void configure(JobConf job) {
-    }
-    
-    public void close() {
-    }
-    
   }
   
   static void printUsage() {
@@ -150,7 +138,7 @@
     conf.setOutputDir(new File((String) other_args.get(1)));
     
     // Uncomment to run locally in a single process
-    // countJob.set("mapred.job.tracker", "local");
+    // conf.set("mapred.job.tracker", "local");
     
     JobClient.runJob(conf);
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityMapper.java Thu Mar 30 14:17:26 2006
@@ -20,16 +20,14 @@
 
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 
 /** Implements the identity function, mapping inputs directly to outputs. */
-public class IdentityMapper implements Mapper {
-
-  public void configure(JobConf job) {}
+public class IdentityMapper extends MapReduceBase implements Mapper {
 
   /** The identify function.  Input key/value pair is written directly to
    * output.*/
@@ -38,5 +36,4 @@
     throws IOException {
     output.collect(key, val);
   }
-	public void close() {}
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/IdentityReducer.java Thu Mar 30 14:17:26 2006
@@ -22,16 +22,14 @@
 
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 
 /** Performs no reduction, writing all input values directly to the output. */
-public class IdentityReducer implements Reducer {
-
-  public void configure(JobConf job) {}
+public class IdentityReducer extends MapReduceBase implements Reducer {
 
   /** Writes all keys and values directly to output. */
   public void reduce(WritableComparable key, Iterator values,
@@ -41,7 +39,5 @@
       output.collect(key, (Writable)values.next());
     }
   }
-	
-	public void close() {}
 	
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/InverseMapper.java Thu Mar 30 14:17:26 2006
@@ -20,17 +20,15 @@
 
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
 
 
 /** A {@link Mapper} that swaps keys and values. */
-public class InverseMapper implements Mapper {
-
-  public void configure(JobConf job) {}
+public class InverseMapper extends MapReduceBase implements Mapper {
 
   /** The inverse function.  Input keys and values are swapped.*/
   public void map(WritableComparable key, Writable value,
@@ -38,7 +36,5 @@
     throws IOException {
     output.collect((WritableComparable)value, key);
   }
-  
-  public void close() {}
   
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/LongSumReducer.java Thu Mar 30 14:17:26 2006
@@ -21,16 +21,14 @@
 
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.LongWritable;
 
 /** A {@link Reducer} that sums long values. */
-public class LongSumReducer implements Reducer {
-
-  public void configure(JobConf job) {}
+public class LongSumReducer extends MapReduceBase implements Reducer {
 
   public void reduce(WritableComparable key, Iterator values,
                      OutputCollector output, Reporter reporter)
@@ -45,7 +43,5 @@
     // output sum
     output.collect(key, new LongWritable(sum));
   }
-  
-  public void close() {}
-  
+
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/RegexMapper.java Thu Mar 30 14:17:26 2006
@@ -22,6 +22,7 @@
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
@@ -34,7 +35,7 @@
 
 
 /** A {@link Mapper} that extracts text matching a regular expression. */
-public class RegexMapper implements Mapper {
+public class RegexMapper extends MapReduceBase implements Mapper {
 
   private Pattern pattern;
   private int group;
@@ -53,7 +54,5 @@
       output.collect(new UTF8(matcher.group(group)), new LongWritable(1));
     }
   }
-  
-  public void close() {}
-  
+
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java?rev=390258&r1=390257&r2=390258&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/TokenCountMapper.java Thu Mar 30 14:17:26 2006
@@ -21,8 +21,8 @@
 
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.MapReduceBase;
 
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.Writable;
@@ -32,15 +32,12 @@
 
 /** A {@link Mapper} that maps text values into <token,freq> pairs.  Uses
  * {@link StringTokenizer} to break text into tokens. */
-public class TokenCountMapper implements Mapper {
-
-  public void configure(JobConf job) {}
+public class TokenCountMapper extends MapReduceBase implements Mapper {
 
   public void map(WritableComparable key, Writable value,
                   OutputCollector output, Reporter reporter)
     throws IOException {
     // get input text
-    long position = ((LongWritable)key).get();    // key is position in file
     String text = ((UTF8)value).toString();       // value is line of text
 
     // tokenize the value
@@ -50,7 +47,5 @@
       output.collect(new UTF8(st.nextToken()), new LongWritable(1));
     }  
   }
-  
-  public void close() {}
   
 }