Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/09/06 01:10:45 UTC

svn commit: r573095 - in /lucene/hadoop/trunk: ./ src/examples/org/apache/hadoop/examples/ src/examples/org/apache/hadoop/examples/dancing/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/s3/ src/java/org/ap...

Author: cutting
Date: Wed Sep  5 16:10:42 2007
New Revision: 573095

URL: http://svn.apache.org/viewvc?rev=573095&view=rev
Log:
HADOOP-1425.  Replace uses of ToolBase with the Tool interface.  Contributed by Enis.
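
For context, every driver below is converted to the same shape: extend
Configured, implement Tool, move the body of main() into run(String[]),
which returns an exit code instead of calling System.exit(), and bootstrap
through ToolRunner. A minimal sketch distilled from the Grep and Sort diffs
below (MyTool is a placeholder name, not part of this commit):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.conf.Configured;
  import org.apache.hadoop.util.Tool;
  import org.apache.hadoop.util.ToolRunner;

  public class MyTool extends Configured implements Tool {

    // run() replaces the old main(); getConf() returns the Configuration
    // that ToolRunner populated from the generic command-line options.
    public int run(String[] args) throws Exception {
      if (args.length < 1) {
        System.out.println("MyTool <inDir>");
        ToolRunner.printGenericCommandUsage(System.out);
        return -1;
      }
      // ... build a JobConf from getConf() and submit it ...
      return 0;
    }

    public static void main(String[] args) throws Exception {
      // ToolRunner parses -conf, -D, -fs and -jt before invoking run()
      int res = ToolRunner.run(new Configuration(), new MyTool(), args);
      System.exit(res);
    }
  }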

Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiEstimator.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
    lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Wed Sep  5 16:10:42 2007
@@ -160,6 +160,9 @@
     HADOOP-1825.  Create $HADOOP_PID_DIR when it does not exist.
     (Michael Bieniosek via cutting)
 
+    HADOOP-1425.  Replace uses of ToolBase with the Tool interface.
+    (Enis Soztutar via cutting)
+
 
 Release 0.14.1 - 2007-09-04
 

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Grep.java Wed Sep  5 16:10:42 2007
@@ -17,73 +17,81 @@
  */
 package org.apache.hadoop.examples;
 
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.SequenceFileOutputFormat;
-import org.apache.hadoop.mapred.SequenceFileInputFormat;
-
-import org.apache.hadoop.mapred.lib.RegexMapper;
-import org.apache.hadoop.mapred.lib.InverseMapper;
-import org.apache.hadoop.mapred.lib.LongSumReducer;
-
-import org.apache.hadoop.io.LongWritable;
-import org.apache.hadoop.io.Text;
+import java.util.Random;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-
-import java.util.Random;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.lib.*;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /* Extracts matching regexes from input files and counts them. */
-public class Grep {
+public class Grep extends Configured implements Tool {
   private Grep() {}                               // singleton
 
-  public static void main(String[] args) throws Exception {
+  public int run(String[] args) throws Exception {
     if (args.length < 3) {
       System.out.println("Grep <inDir> <outDir> <regex> [<group>]");
-      System.exit(-1);
+      ToolRunner.printGenericCommandUsage(System.out);
+      return -1;
     }
 
     Path tempDir =
       new Path("grep-temp-"+
-               Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
+          Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
+
+    JobConf grepJob = new JobConf(getConf(), Grep.class);
+    
+    try {
+      
+      grepJob.setJobName("grep-search");
 
-    JobConf grepJob = new JobConf(Grep.class);
-    grepJob.setJobName("grep-search");
+      grepJob.setInputPath(new Path(args[0]));
 
-    grepJob.setInputPath(new Path(args[0]));
+      grepJob.setMapperClass(RegexMapper.class);
+      grepJob.set("mapred.mapper.regex", args[2]);
+      if (args.length == 4)
+        grepJob.set("mapred.mapper.regex.group", args[3]);
 
-    grepJob.setMapperClass(RegexMapper.class);
-    grepJob.set("mapred.mapper.regex", args[2]);
-    if (args.length == 4)
-      grepJob.set("mapred.mapper.regex.group", args[3]);
-    
-    grepJob.setCombinerClass(LongSumReducer.class);
-    grepJob.setReducerClass(LongSumReducer.class);
+      grepJob.setCombinerClass(LongSumReducer.class);
+      grepJob.setReducerClass(LongSumReducer.class);
 
-    grepJob.setOutputPath(tempDir);
-    grepJob.setOutputFormat(SequenceFileOutputFormat.class);
-    grepJob.setOutputKeyClass(Text.class);
-    grepJob.setOutputValueClass(LongWritable.class);
+      grepJob.setOutputPath(tempDir);
+      grepJob.setOutputFormat(SequenceFileOutputFormat.class);
+      grepJob.setOutputKeyClass(Text.class);
+      grepJob.setOutputValueClass(LongWritable.class);
 
-    JobClient.runJob(grepJob);
+      JobClient.runJob(grepJob);
 
-    JobConf sortJob = new JobConf(Grep.class);
-    sortJob.setJobName("grep-sort");
+      JobConf sortJob = new JobConf(Grep.class);
+      sortJob.setJobName("grep-sort");
 
-    sortJob.setInputPath(tempDir);
-    sortJob.setInputFormat(SequenceFileInputFormat.class);
+      sortJob.setInputPath(tempDir);
+      sortJob.setInputFormat(SequenceFileInputFormat.class);
 
-    sortJob.setMapperClass(InverseMapper.class);
+      sortJob.setMapperClass(InverseMapper.class);
 
-    sortJob.setNumReduceTasks(1);                 // write a single file
-    sortJob.setOutputPath(new Path(args[1]));
-    sortJob.setOutputKeyComparatorClass           // sort by decreasing freq
+      sortJob.setNumReduceTasks(1);                 // write a single file
+      sortJob.setOutputPath(new Path(args[1]));
+      sortJob.setOutputKeyComparatorClass           // sort by decreasing freq
       (LongWritable.DecreasingComparator.class);
 
-    JobClient.runJob(sortJob);
+      JobClient.runJob(sortJob);
+    }
+    finally {
+      FileSystem.get(grepJob).delete(tempDir);
+    }
+    return 0;
+  }
 
-    FileSystem.get(grepJob).delete(tempDir);
+  public static void main(String[] args) throws Exception {
+    int res = ToolRunner.run(new Configuration(), new Grep(), args);
+    System.exit(res);
   }
 
 }

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiEstimator.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiEstimator.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiEstimator.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/PiEstimator.java Wed Sep  5 16:10:42 2007
@@ -21,7 +21,9 @@
 import java.io.IOException;
 import java.util.Iterator;
 import java.util.Random;
+
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
@@ -29,13 +31,23 @@
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reducer;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * A Map-reduce program to estimate the value of Pi using the Monte Carlo
  * method.
  */
-public class PiEstimator {
+public class PiEstimator extends Configured implements Tool {
   
   /**
    * Mapper class for Pi estimation.
@@ -47,6 +59,7 @@
     /** Mapper configuration.
      *
      */
+    @Override
     public void configure(JobConf job) {
     }
     
@@ -83,6 +96,7 @@
       out.collect(new LongWritable(1), new LongWritable(numInside));
     }
     
+    @Override
     public void close() {
       // nothing
     }
@@ -98,6 +112,7 @@
     /** Reducer configuration.
      *
      */
+    @Override
     public void configure(JobConf job) {
       conf = job;
     }
@@ -124,6 +139,7 @@
       }
     }
       
+    @Override
     public void close() throws IOException {
       Path tmpDir = new Path("test-mini-mr");
       Path outDir = new Path(tmpDir, "out");
@@ -141,11 +157,10 @@
   * This is the main driver for computing the value of Pi using
   * the Monte Carlo method.
    */
-  static double launch(int numMaps, long numPoints, String jt, String dfs)
+  double launch(int numMaps, long numPoints, String jt, String dfs)
     throws IOException {
 
-    Configuration conf = new Configuration();
-    JobConf jobConf = new JobConf(conf, PiEstimator.class);
+    JobConf jobConf = new JobConf(getConf(), PiEstimator.class);
     if (jt != null) { jobConf.set("mapred.job.tracker", jt); }
     if (dfs != null) { jobConf.set("fs.default.name", dfs); }
     jobConf.setJobName("test-mini-mr");
@@ -194,7 +209,7 @@
       long startTime = System.currentTimeMillis();
       JobClient.runJob(jobConf);
       System.out.println("Job Finished in "+
-                         (double)(System.currentTimeMillis() - startTime)/1000.0 + " seconds");
+                         (System.currentTimeMillis() - startTime)/1000.0 + " seconds");
       Path inFile = new Path(outDir, "reduce-out");
       SequenceFile.Reader reader = new SequenceFile.Reader(fileSys, inFile,
                                                            jobConf);
@@ -202,7 +217,7 @@
       LongWritable numOutside = new LongWritable();
       reader.next(numInside, numOutside);
       reader.close();
-      estimate = (double) (numInside.get()*4.0)/(numMaps*numPoints);
+      estimate = (numInside.get()*4.0)/(numMaps*numPoints);
     } finally {
       fileSys.delete(tmpDir);
     }
@@ -213,18 +228,27 @@
   /**
    * Launches all the tasks in order.
    */
-  public static void main(String[] argv) throws Exception {
-    if (argv.length < 2) {
+  public int run(String[] args) throws Exception {
+    if (args.length < 2) {
       System.err.println("Usage: TestMiniMR <nMaps> <nSamples>");
-      return;
+      ToolRunner.printGenericCommandUsage(System.err);
+      return -1;
     }
-
-    int nMaps = Integer.parseInt(argv[0]);
-    long nSamples = Long.parseLong(argv[1]);
+    
+    int nMaps = Integer.parseInt(args[0]);
+    long nSamples = Long.parseLong(args[1]);
         
     System.out.println("Number of Maps = "+nMaps+" Samples per Map = "+nSamples);
         
     System.out.println("Estimated value of PI is "+
                        launch(nMaps, nSamples, null, null));
+    
+    return 0;
   }
+  
+  public static void main(String[] argv) throws Exception {
+    int res = ToolRunner.run(new Configuration(), new PiEstimator(), argv);
+    System.exit(res);
+  }
+
 }

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/RandomWriter.java Wed Sep  5 16:10:42 2007
@@ -23,11 +23,28 @@
 import java.util.Random;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.mapred.*;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.mapred.ClusterStatus;
+import org.apache.hadoop.mapred.FileSplit;
+import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.InputSplit;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.RecordReader;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.SequenceFileOutputFormat;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolBase;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This program uses map/reduce to just run a distributed job where there is
@@ -64,7 +81,7 @@
  * Equivalently, {@link RandomWriter} also supports all the above options
  * and ones supported by {@link ToolBase} via the command-line.
  */
-public class RandomWriter extends ToolBase {
+public class RandomWriter extends Configured implements Tool {
   
   /**
    * User counters
@@ -186,6 +203,7 @@
      * Save the values out of the configuration that we need to write
      * the data.
      */
+    @Override
     public void configure(JobConf job) {
       numBytesToWrite = job.getLong("test.randomwrite.bytes_per_map",
                                     1*1024*1024*1024);
@@ -208,12 +226,13 @@
    */
   public int run(String[] args) throws Exception {    
     if (args.length == 0) {
-      System.out.println("Usage: writer <out-dir> [<config>]");
+      System.out.println("Usage: writer <out-dir>");
+      ToolRunner.printGenericCommandUsage(System.out);
       return -1;
     }
     
     Path outDir = new Path(args[0]);
-    JobConf job = new JobConf(conf);
+    JobConf job = new JobConf(getConf());
     
     job.setJarByClass(RandomWriter.class);
     job.setJobName("random-writer");
@@ -263,7 +282,7 @@
   }
   
   public static void main(String[] args) throws Exception {
-    int res = new RandomWriter().doMain(new Configuration(), args);
+    int res = ToolRunner.run(new Configuration(), new RandomWriter(), args);
     System.exit(res);
   }
 

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/Sort.java Wed Sep  5 16:10:42 2007
@@ -18,14 +18,18 @@
 
 package org.apache.hadoop.examples;
 
-import java.io.*;
+import java.io.IOException;
 import java.util.*;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.mapred.lib.*;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This is the trivial map/reduce program that does absolutely nothing
@@ -34,40 +38,40 @@
  * To run: bin/hadoop jar build/hadoop-examples.jar sort
  *            [-m <i>maps</i>] [-r <i>reduces</i>] <i>in-dir</i> <i>out-dir</i> 
  */
-public class Sort {
-  
-  static void printUsage() {
+public class Sort extends Configured implements Tool {
+
+  static int printUsage() {
     System.out.println("sort [-m <maps>] [-r <reduces>] <input> <output>");
-    System.exit(1);
+    ToolRunner.printGenericCommandUsage(System.out);
+    return -1;
   }
-  
+
   /**
    * The main driver for sort program.
    * Invoke this method to submit the map/reduce job.
   * @throws IOException When there are communication problems with the
   *                     job tracker.
    */
-  public static void main(String[] args) throws IOException {
-    Configuration defaults = new Configuration();
-    
-    JobConf jobConf = new JobConf(defaults, Sort.class);
+  public int run(String[] args) throws Exception {
+
+    JobConf jobConf = new JobConf(getConf(), Sort.class);
     jobConf.setJobName("sorter");
- 
+
     jobConf.setInputFormat(SequenceFileInputFormat.class);
     jobConf.setOutputFormat(SequenceFileOutputFormat.class);
-   
+
     jobConf.setOutputKeyClass(BytesWritable.class);
     jobConf.setOutputValueClass(BytesWritable.class);
-    
+
     jobConf.setMapperClass(IdentityMapper.class);        
     jobConf.setReducerClass(IdentityReducer.class);
-    
+
     JobClient client = new JobClient(jobConf);
     ClusterStatus cluster = client.getClusterStatus();
     int num_maps = cluster.getTaskTrackers() * 
-      jobConf.getInt("test.sort.maps_per_host", 10);
+    jobConf.getInt("test.sort.maps_per_host", 10);
     int num_reduces = cluster.getTaskTrackers() * 
-      jobConf.getInt("test.sort.reduces_per_host", cluster.getMaxTasks());
+    jobConf.getInt("test.sort.reduces_per_host", cluster.getMaxTasks());
     List<String> otherArgs = new ArrayList<String>();
     for(int i=0; i < args.length; ++i) {
       try {
@@ -80,41 +84,46 @@
         }
       } catch (NumberFormatException except) {
         System.out.println("ERROR: Integer expected instead of " + args[i]);
-        printUsage();
+        return printUsage();
       } catch (ArrayIndexOutOfBoundsException except) {
         System.out.println("ERROR: Required parameter missing from " +
-                           args[i-1]);
-        printUsage(); // exits
+            args[i-1]);
+        return printUsage();
       }
     }
-    
+
     jobConf.setNumMapTasks(num_maps);
     jobConf.setNumReduceTasks(num_reduces);
-    
+
     // Make sure there are exactly 2 parameters left.
     if (otherArgs.size() != 2) {
       System.out.println("ERROR: Wrong number of parameters: " +
-                         otherArgs.size() + " instead of 2.");
-      printUsage();
+          otherArgs.size() + " instead of 2.");
+      return printUsage();
     }
-    jobConf.setInputPath(new Path((String) otherArgs.get(0)));
-    jobConf.setOutputPath(new Path((String) otherArgs.get(1)));
-    
-    // Uncomment to run locally in a single process
-    //job_conf.set("mapred.job.tracker", "local");
-    
+    jobConf.setInputPath(new Path(otherArgs.get(0)));
+    jobConf.setOutputPath(new Path(otherArgs.get(1)));
+
     System.out.println("Running on " +
-                       cluster.getTaskTrackers() +
-                       " nodes to sort from " + 
-                       jobConf.getInputPaths()[0] + " into " +
-                       jobConf.getOutputPath() + " with " + num_reduces + " reduces.");
+        cluster.getTaskTrackers() +
+        " nodes to sort from " + 
+        jobConf.getInputPaths()[0] + " into " +
+        jobConf.getOutputPath() + " with " + num_reduces + " reduces.");
     Date startTime = new Date();
     System.out.println("Job started: " + startTime);
     JobClient.runJob(jobConf);
     Date end_time = new Date();
     System.out.println("Job ended: " + end_time);
     System.out.println("The job took " + 
-                       (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
+        (end_time.getTime() - startTime.getTime()) /1000 + " seconds.");
+    return 0;
   }
-  
+
+
+
+  public static void main(String[] args) throws Exception {
+    int res = ToolRunner.run(new Configuration(), new Sort(), args);
+    System.exit(res);
+  }
+
 }

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/WordCount.java Wed Sep  5 16:10:42 2007
@@ -18,20 +18,27 @@
 
 package org.apache.hadoop.examples;
 
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Iterator;
+import java.util.List;
+import java.util.StringTokenizer;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.Mapper;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.hadoop.mapred.MapReduceBase;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This is an example Hadoop Map/Reduce application.
@@ -42,7 +49,7 @@
  * To run: bin/hadoop jar build/hadoop-examples.jar wordcount
  *            [-m <i>maps</i>] [-r <i>reduces</i>] <i>in-dir</i> <i>out-dir</i> 
  */
-public class WordCount {
+public class WordCount extends Configured implements Tool {
   
   /**
    * Counts the words in each line.
@@ -84,9 +91,10 @@
     }
   }
   
-  static void printUsage() {
+  static int printUsage() {
     System.out.println("wordcount [-m <maps>] [-r <reduces>] <input> <output>");
-    System.exit(1);
+    ToolRunner.printGenericCommandUsage(System.out);
+    return -1;
   }
   
   /**
@@ -95,8 +103,8 @@
   * @throws IOException When there are communication problems with the
   *                     job tracker.
    */
-  public static void main(String[] args) throws IOException {
-    JobConf conf = new JobConf(WordCount.class);
+  public int run(String[] args) throws Exception {
+    JobConf conf = new JobConf(getConf(), WordCount.class);
     conf.setJobName("wordcount");
  
     // the keys are words (strings)
@@ -120,26 +128,30 @@
         }
       } catch (NumberFormatException except) {
         System.out.println("ERROR: Integer expected instead of " + args[i]);
-        printUsage();
+        return printUsage();
       } catch (ArrayIndexOutOfBoundsException except) {
         System.out.println("ERROR: Required parameter missing from " +
                            args[i-1]);
-        printUsage(); // exits
+        return printUsage();
       }
     }
     // Make sure there are exactly 2 parameters left.
     if (other_args.size() != 2) {
       System.out.println("ERROR: Wrong number of parameters: " +
                          other_args.size() + " instead of 2.");
-      printUsage();
+      return printUsage();
     }
-    conf.setInputPath(new Path((String) other_args.get(0)));
-    conf.setOutputPath(new Path((String) other_args.get(1)));
-    
-    // Uncomment to run locally in a single process
-    // conf.set("mapred.job.tracker", "local");
-    
+    conf.setInputPath(new Path(other_args.get(0)));
+    conf.setOutputPath(new Path(other_args.get(1)));
+        
     JobClient.runJob(conf);
+    return 0;
   }
   
+  
+  public static void main(String[] args) throws Exception {
+    int res = ToolRunner.run(new Configuration(), new WordCount(), args);
+    System.exit(res);
+  }
+
 }

Modified: lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java (original)
+++ lucene/hadoop/trunk/src/examples/org/apache/hadoop/examples/dancing/DistributedPentomino.java Wed Sep  5 16:10:42 2007
@@ -19,10 +19,13 @@
 package org.apache.hadoop.examples.dancing;
 
 import java.io.*;
-import java.util.*;
+import java.util.List;
+import java.util.StringTokenizer;
 
-import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.mapred.*;
@@ -38,7 +41,7 @@
  * the previous choice. That file is given as the input to
  * map/reduce. The output key/value are the move prefix/solution as Text/Text.
  */
-public class DistributedPentomino {
+public class DistributedPentomino extends Configured implements Tool {
 
   /**
    * Each map takes a line, which represents a prefix move and finds all of 
@@ -98,6 +101,7 @@
       pent.solve(prefix);
     }
     
+    @Override
     public void configure(JobConf conf) {
       depth = conf.getInt("pent.depth", -1);
       width = conf.getInt("pent.width", -1);
@@ -146,56 +150,58 @@
    * This takes about 2.5 hours on 20 nodes with 2 cpus/node.
    * Splits the job into 2000 maps and 1 reduce.
    */
-  public static void main(String[] args) throws IOException {
+  public static void main(String[] args) throws Exception {
+    int res = ToolRunner.run(new Configuration(), new DistributedPentomino(), args);
+    System.exit(res);
+  }
+
+  public int run(String[] args) throws Exception {
     JobConf conf;
     int depth = 5;
     int width = 9;
     int height = 10;
     Class pentClass;
     if (args.length == 0) {
-      System.out.println("pentomino <output> [conf]");
-      return;
-    }
-    if (args.length == 1) {
-      conf = new JobConf();
-      conf.setInt("pent.width", width);
-      conf.setInt("pent.height", height);
-      conf.setInt("pent.depth", depth);
-      pentClass = OneSidedPentomino.class;
-    } else {
-      conf = new JobConf(args[0]);
-      width = conf.getInt("pent.width", width);
-      height = conf.getInt("pent.height", height);
-      depth = conf.getInt("pent.depth", depth);
-      pentClass = conf.getClass("pent.class", OneSidedPentomino.class);
+      System.out.println("pentomino <output>");
+      ToolRunner.printGenericCommandUsage(System.out);
+      return -1;
     }
+    
+    conf = new JobConf(getConf());
+    width = conf.getInt("pent.width", width);
+    height = conf.getInt("pent.height", height);
+    depth = conf.getInt("pent.depth", depth);
+    pentClass = conf.getClass("pent.class", OneSidedPentomino.class);
+    
     Path output = new Path(args[0]);
     Path input = new Path(output + "_input");
-    conf.setInputPath(input);
-    conf.setOutputPath(output);
-    conf.setJarByClass(PentMap.class);
     FileSystem fileSys = FileSystem.get(conf);
-    conf.setJobName("dancingElephant");
-    Pentomino pent = (Pentomino) ReflectionUtils.newInstance(pentClass, conf);
-    pent.initialize(width, height);
-    createInputDirectory(fileSys, input, pent, depth);
- 
-    // the keys are the prefix strings
-    conf.setOutputKeyClass(Text.class);
-    // the values are puzzle solutions
-    conf.setOutputValueClass(Text.class);
-    
-    conf.setMapperClass(PentMap.class);        
-    conf.setReducerClass(IdentityReducer.class);
-    
-    conf.setNumMapTasks(2000);
-    conf.setNumReduceTasks(1);
-    
-    // Uncomment to run locally in a single process
-    //conf.set("mapred.job.tracker", "local");
-    
-    JobClient.runJob(conf);
-    fileSys.delete(input);
+    try {
+      conf.setInputPath(input);
+      conf.setOutputPath(output);
+      conf.setJarByClass(PentMap.class);
+      
+      conf.setJobName("dancingElephant");
+      Pentomino pent = (Pentomino) ReflectionUtils.newInstance(pentClass, conf);
+      pent.initialize(width, height);
+      createInputDirectory(fileSys, input, pent, depth);
+   
+      // the keys are the prefix strings
+      conf.setOutputKeyClass(Text.class);
+      // the values are puzzle solutions
+      conf.setOutputValueClass(Text.class);
+      
+      conf.setMapperClass(PentMap.class);        
+      conf.setReducerClass(IdentityReducer.class);
+      
+      conf.setNumMapTasks(2000);
+      conf.setNumReduceTasks(1);
+      
+      JobClient.runJob(conf);
+    } finally {
+      fileSys.delete(input);
+    }
+    return 0;
   }
 
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java Wed Sep  5 16:10:42 2007
@@ -213,6 +213,8 @@
       System.out.println(refreshNodes);
       System.out.println(upgradeProgress);
       System.out.println(help);
+      System.out.println();
+      ToolRunner.printGenericCommandUsage(System.out);
     }
 
   }
@@ -325,6 +327,8 @@
       System.err.println("           [-upgradeProgress status | details | force]");
       System.err.println("           [-metasave filename]");
       System.err.println("           [-help [cmd]]");
+      System.err.println();
+      ToolRunner.printGenericCommandUsage(System.err);
     }
   }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java Wed Sep  5 16:10:42 2007
@@ -89,6 +89,7 @@
       System.err.println("\t-files\tprint out files being checked");
       System.err.println("\t-blocks\tprint out block report");
       System.err.println("\t-locations\tprint out locations for every block");
+      ToolRunner.printGenericCommandUsage(System.err);
       return -1;
     }
     StringBuffer url = new StringBuffer("http://"+fsName+"/fsck?path=");

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java Wed Sep  5 16:10:42 2007
@@ -1216,9 +1216,6 @@
                          " [-stat [format] <path>]");
     } else {
       System.err.println("Usage: java FsShell");
-      System.err.println("           [-fs <local | file system URI>]");
-      System.err.println("           [-conf <configuration file>]");
-      System.err.println("           [-D <[property=value>]");
       System.err.println("           [-ls <path>]");
       System.err.println("           [-lsr <path>]");
       System.err.println("           [-du <path>]");
@@ -1242,6 +1239,8 @@
       System.err.println("           [-test -[ezd] <path>]");
       System.err.println("           [-stat [format] <path>]");
       System.err.println("           [-help [cmd]]");
+      System.err.println();
+      ToolRunner.printGenericCommandUsage(System.err);
     }
   }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java Wed Sep  5 16:10:42 2007
@@ -62,6 +62,7 @@
     if (args.length == 0) {
       System.err.println("Usage: MigrationTool <S3 file system URI>");
       System.err.println("\t<S3 file system URI>\tfilesystem to migrate");
+      ToolRunner.printGenericCommandUsage(System.err);
       return -1;
     }
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Wed Sep  5 16:10:42 2007
@@ -768,6 +768,7 @@
     System.out.printf("\t-kill\t<job-id>\n");
     System.out.printf("\t-events\t<job-id> <from-event-#> <#-of-events>\n");
     System.out.printf("\t-list\n\n");
+    ToolRunner.printGenericCommandUsage(System.out);
     throw new RuntimeException("JobClient: bad command-line arguments");
   }
     

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/lib/aggregate/ValueAggregatorJob.java Wed Sep  5 16:10:42 2007
@@ -21,17 +21,18 @@
 import java.io.IOException;
 import java.util.ArrayList;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.InputFormat;
+import org.apache.hadoop.mapred.JobClient;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.SequenceFileInputFormat;
 import org.apache.hadoop.mapred.TextInputFormat;
 import org.apache.hadoop.mapred.TextOutputFormat;
 import org.apache.hadoop.mapred.jobcontrol.Job;
 import org.apache.hadoop.mapred.jobcontrol.JobControl;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.RunningJob;
+import org.apache.hadoop.util.GenericOptionsParser;
 
 /**
  * This is the main class for creating a map/reduce job using Aggregate
@@ -90,17 +91,26 @@
   /**
    * Create an Aggregate based map/reduce job.
    * 
-   * @param args the arguments used for job creation
+   * @param args the arguments used for job creation. Generic hadoop
+   * arguments are accepted.
    * @return a JobConf object ready for submission.
    * 
    * @throws IOException
+   * @see GenericOptionsParser
    */
   public static JobConf createValueAggregatorJob(String args[])
     throws IOException {
 
+    Configuration conf = new Configuration();
+    
+    GenericOptionsParser genericParser 
+      = new GenericOptionsParser(conf, args);
+    args = genericParser.getRemainingArgs();
+    
     if (args.length < 2) {
       System.out.println("usage: inputDirs outDir "
           + "[numOfReducer [textinputformat|seq [specfile [jobName]]]]");
+      GenericOptionsParser.printGenericCommandUsage(System.out);
       System.exit(1);
     }
     String inputDir = args[0];
@@ -131,7 +141,7 @@
       jobName = args[5];
     }
     
-    JobConf theJob = new JobConf();
+    JobConf theJob = new JobConf(conf);
     if (specFile != null) {
       theJob.addDefaultResource(specFile);
     }

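ValueAggregatorJob above is converted differently: createValueAggregatorJob
is a static factory, so rather than implementing Tool it drives
GenericOptionsParser directly. The same idiom works for any static entry
point; a rough sketch (method name and argument handling illustrative only):

  import java.io.IOException;

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.mapred.JobConf;
  import org.apache.hadoop.util.GenericOptionsParser;

  public static JobConf createJob(String[] args) throws IOException {
    Configuration conf = new Configuration();
    // consume -conf/-D/-fs/-jt into conf, keep the application arguments
    args = new GenericOptionsParser(conf, args).getRemainingArgs();
    // ... validate the remaining args, then seed the job with conf ...
    return new JobConf(conf);
  }
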
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Wed Sep  5 16:10:42 2007
@@ -39,8 +39,8 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
-import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.SequenceFile;
@@ -828,6 +828,7 @@
         logPath = new Path(toURI(args[++idx]).getPath());
       } else {
         System.out.println(usage);
+        ToolRunner.printGenericCommandUsage(System.out);
         return -1;
       }
     }
@@ -835,6 +836,7 @@
     // mandatory command-line parameters
     if (srcPath == null || destPath == null) {
       System.out.println(usage);
+      ToolRunner.printGenericCommandUsage(System.out);
       return -1;
     }
   

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java Wed Sep  5 16:10:42 2007
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.util;
 
+import java.io.PrintStream;
+
 import org.apache.commons.cli.CommandLine;
 import org.apache.commons.cli.CommandLineParser;
 import org.apache.commons.cli.GnuParser;
@@ -210,4 +212,14 @@
     return args;
   }
 
+  public static void printGenericCommandUsage(PrintStream out) {
+    out.println("Generic options supported are");
+    out.println("-conf <configuration file>     specify an application configuration file");
+    out.println("-D <property=value>            use value for given property");
+    out.println("-fs <local|namenode:port>      specify a namenod");
+    out.println("-jt <local|jobtracker:port>    specify a job tracker\n");
+    out.println("The general command line syntax is");
+    out.println("bin/hadoop command [genericOptions] [commandOptions]\n");
+  }
+  
 }

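With printGenericCommandUsage() in place, every converted command documents
the options it inherits from GenericOptionsParser. For illustration, a
hypothetical invocation of the sort example (host name invented, the
property taken from Sort.java above):

  bin/hadoop jar build/hadoop-examples.jar sort \
      -jt jobtracker.example.com:9001 \
      -D test.sort.reduces_per_host=2 \
      in-dir out-dir
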
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java Wed Sep  5 16:10:42 2007
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.util;
 
+import java.io.PrintStream;
+
 import org.apache.hadoop.conf.Configuration;
 
 /**
@@ -63,6 +65,11 @@
   public static int run(Tool tool, String[] args) 
     throws Exception{
     return run(tool.getConf(), tool, args);
+  }
+  
+  /** Delegates to GenericOptionsParser#printGenericCommandUsage() */
+  public static void printGenericCommandUsage(PrintStream out) {
+    GenericOptionsParser.printGenericCommandUsage(out);
   }
   
 }

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java?rev=573095&r1=573094&r2=573095&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/mapred/BigMapOutput.java Wed Sep  5 16:10:42 2007
@@ -18,21 +18,26 @@
 
 package org.apache.hadoop.mapred;
 
-import java.io.*;
-import java.util.*;
+import java.io.IOException;
+import java.util.Date;
+import java.util.Random;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.mapred.SortValidator.RecordStatsChecker.*;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.mapred.lib.*;
-import org.apache.hadoop.util.ToolBase;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.mapred.SortValidator.RecordStatsChecker.NonSplitableSequenceFileInputFormat;
+import org.apache.hadoop.mapred.lib.IdentityMapper;
+import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
-public class BigMapOutput extends ToolBase {
+public class BigMapOutput extends Configured implements Tool {
   public static final Log LOG =
     LogFactory.getLog(BigMapOutput.class.getName());
   private static Random random = new Random();
@@ -95,6 +100,7 @@
   private static void usage() {
     System.err.println("BigMapOutput -input <input-dir> -output <output-dir> " +
                        "[-create <filesize in MB>]");
+    ToolRunner.printGenericCommandUsage(System.err);
     System.exit(1);
   }
   public int run(String[] args) throws Exception {    
@@ -118,8 +124,8 @@
       }
     }
     
-    FileSystem fs = FileSystem.get(conf);
-    JobConf jobConf = new JobConf(conf, BigMapOutput.class);
+    FileSystem fs = FileSystem.get(getConf());
+    JobConf jobConf = new JobConf(getConf(), BigMapOutput.class);
 
     jobConf.setJobName("BigMapOutput");
     jobConf.setInputFormat(NonSplitableSequenceFileInputFormat.class);
@@ -148,7 +154,7 @@
   }
 
   public static void main(String argv[]) throws Exception {
-    int res = new BigMapOutput().doMain(new Configuration(), argv);
+    int res = ToolRunner.run(new Configuration(), new BigMapOutput(), argv);
     System.exit(res);
   }