Posted to common-commits@hadoop.apache.org by to...@apache.org on 2007/03/23 19:03:16 UTC

svn commit: r521835 - in /lucene/hadoop/trunk: CHANGES.txt src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java

Author: tomwhite
Date: Fri Mar 23 11:03:15 2007
New Revision: 521835

URL: http://svn.apache.org/viewvc?view=rev&rev=521835
Log:
HADOOP-1150.  Fix streaming -reducer and -mapper to give them defaults.   Contributed by Owen O'Malley.
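
For context, a minimal sketch of what these defaults amount to, assuming the 0.12-era
org.apache.hadoop.mapred API in which JobConf falls back to IdentityMapper/IdentityReducer
when no classes are set (the class name DefaultsSketch below is hypothetical, not part of
the commit): with -mapper now optional, StreamJob skips setMapperClass() when no command is
given, and presumably the reducer path is handled the same way (that hunk is not shown in
this excerpt), so the job inherits JobConf's built-in identity map and reduce.

    import org.apache.hadoop.mapred.JobConf;

    public class DefaultsSketch {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // No setMapperClass()/setReducerClass() calls here, mirroring a streaming
        // job submitted without -mapper or -reducer after this change.
        System.out.println(conf.getMapperClass());   // expected: IdentityMapper
        System.out.println(conf.getReducerClass());  // expected: IdentityReducer
      }
    }

In other words, a streaming job that only names -input and -output should now behave like an
identity map/reduce pass instead of being rejected by option parsing.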

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?view=diff&rev=521835&r1=521834&r2=521835
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri Mar 23 11:03:15 2007
@@ -11,6 +11,9 @@
 
  3. HADOOP-1140.  Fix a deadlock in metrics. (David Bowen via cutting)
 
+ 4. HADOOP-1150.  Fix streaming -reducer and -mapper to give them
+    defaults. (Owen O'Malley via tomwhite)
+
 
 Release 0.12.1 - 2007-03-17
 

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java?view=diff&rev=521835&r1=521834&r2=521835
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/java/org/apache/hadoop/streaming/StreamJob.java Fri Mar 23 11:03:15 2007
@@ -368,12 +368,12 @@
         "DFS output directory for the Reduce step", 
         "path", 1, true); 
     Option mapper  = createOption("mapper", 
-        "The streaming command to run", "cmd", 1, true);
+        "The streaming command to run", "cmd", 1, false);
     Option combiner = createOption("combiner", 
         "The streaming command to run", "cmd",1, false);
     // reducer could be NONE 
     Option reducer = createOption("reducer", 
-        "The streaming command to run", "cmd", 1, true); 
+        "The streaming command to run", "cmd", 1, false); 
     Option file = createOption("file", 
         "File/dir to be shipped in the Job jar file", 
         "file", Integer.MAX_VALUE, false, execValidator); 
@@ -692,12 +692,16 @@
 
     String defaultPackage = this.getClass().getPackage().getName();
 
-    Class c = StreamUtil.goodClassOrNull(mapCmd_, defaultPackage);
-    if (c != null) {
-      jobConf_.setMapperClass(c);
-    } else {
-      jobConf_.setMapperClass(PipeMapper.class);
-      jobConf_.set("stream.map.streamprocessor", URLEncoder.encode(mapCmd_, "UTF-8"));
+    Class c;
+    if (mapCmd_ != null) {
+      c = StreamUtil.goodClassOrNull(mapCmd_, defaultPackage);
+      if (c != null) {
+        jobConf_.setMapperClass(c);
+      } else {
+        jobConf_.setMapperClass(PipeMapper.class);
+        jobConf_.set("stream.map.streamprocessor", 
+                     URLEncoder.encode(mapCmd_, "UTF-8"));
+      }
     }
 
     if (comCmd_ != null) {
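
The guard in the hunk above only overrides JobConf's default mapper when -mapper was actually
supplied. Pulled out of StreamJob into a hypothetical standalone helper (configureStreamMapper
is not a real method in this class, just an illustration of the pattern), it reads roughly:

    import java.io.UnsupportedEncodingException;
    import java.net.URLEncoder;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.streaming.PipeMapper;

    class MapperConfigSketch {
      /** Set the mapper only if the user gave a -mapper argument. */
      static void configureStreamMapper(JobConf conf, String mapCmd, Class resolvedClass)
          throws UnsupportedEncodingException {
        if (mapCmd == null) {
          return;                                 // keep the framework default
        }
        if (resolvedClass != null) {
          conf.setMapperClass(resolvedClass);     // -mapper named a Java class
        } else {
          conf.setMapperClass(PipeMapper.class);  // -mapper named an external command
          conf.set("stream.map.streamprocessor",
                   URLEncoder.encode(mapCmd, "UTF-8"));
        }
      }
    }

The early return is what lets the JobConf default stand now that the option parser no longer
requires -mapper.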