Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/06/26 23:11:38 UTC

svn commit: r417298 - in /lucene/hadoop/trunk: ./ lib/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/mapred/ src/java/org/apache/hadoop/util/

Author: cutting
Date: Mon Jun 26 14:11:38 2006
New Revision: 417298

URL: http://svn.apache.org/viewvc?rev=417298&view=rev
Log:
HADOOP-59.  Add support for generic command line options.  Contributed by Hairong.

Added:
    lucene/hadoop/trunk/lib/commons-cli-2.0-SNAPSHOT.jar   (with props)
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=417298&r1=417297&r2=417298&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Jun 26 14:11:38 2006
@@ -60,6 +60,12 @@
     This improves performance when tasks are short.
     (Mahadev Konar via cutting)
 
+15. HADOOP-59.  Add support for generic command line options.  One may
+    now specify the filesystem (-fs), the MapReduce jobtracker (-jt),
+    a config file (-conf) or any configuration property (-D).  The
+    "dfs", "fsck", "job", and "distcp" commands currently support
+    this, with more to be added.  (Hairong Kuang via cutting)
+
 
 Release 0.3.2 - 2006-06-09
 

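As a minimal sketch of the pattern this change introduces (hypothetical MyTool class, not part of the commit): a command extends the ToolBase class added below, implements run(), and calls doMain(), which strips the generic options (-fs, -jt, -conf, -D), applies them to the Configuration, and passes only the command-specific arguments on to run().

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.ToolBase;

    public class MyTool extends ToolBase {
        public int run(String[] args) throws Exception {
            // Generic options have already been applied to this.conf here;
            // args holds only the command-specific options.
            System.out.println("fs.default.name = " +
                               conf.get("fs.default.name", "local"));
            for (int i = 0; i < args.length; i++) {
                System.out.println("command option: " + args[i]);
            }
            return 0;
        }

        public static void main(String[] args) {
            new MyTool().doMain(new Configuration(), args);
        }
    }
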
Added: lucene/hadoop/trunk/lib/commons-cli-2.0-SNAPSHOT.jar
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/lib/commons-cli-2.0-SNAPSHOT.jar?rev=417298&view=auto
==============================================================================
Binary file - no diff available.

Propchange: lucene/hadoop/trunk/lib/commons-cli-2.0-SNAPSHOT.jar
------------------------------------------------------------------------------
    svn:mime-type = application/octet-stream

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java?rev=417298&r1=417297&r2=417298&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSShell.java Mon Jun 26 14:11:38 2006
@@ -19,13 +19,14 @@
 
 import org.apache.hadoop.conf.*;
 import org.apache.hadoop.fs.*;
+import org.apache.hadoop.util.ToolBase;
 
 /**************************************************
  * This class provides some DFS administrative access.
  *
  * @author Mike Cafarella
  **************************************************/
-public class DFSShell {
+public class DFSShell extends ToolBase {
 
     // required for unknown reason to make WritableFactories work distributed
     static { new DatanodeInfo(); }
@@ -34,10 +35,12 @@
 
     /**
      */
-    public DFSShell(FileSystem fs) {
-        this.fs = fs;
+    public DFSShell() {
     }
 
+    public void init() throws IOException {
+        this.fs = FileSystem.get(conf);
+    }
     /**
      * Add a local file to the indicated name in DFS. src is kept.
      */
@@ -292,64 +295,71 @@
     }
 
     /**
-     * main() has some simple utility methods
+     * Run the command given in argv.
      */
-    public static void main(String argv[]) throws IOException {
+    public int run( String argv[] ) throws Exception {
         if (argv.length < 1) {
-            System.out.println("Usage: java DFSShell [-local | -dfs <namenode:port>]" +
+            System.out.println("Usage: java DFSShell [-fs <local | namenode:port>]"+
+                    " [-conf <configuration file>] [-D <[property=value>]"+
                     " [-ls <path>] [-lsr <path>] [-du <path>] [-mv <src> <dst>] [-cp <src> <dst>] [-rm <src>]" +
                     " [-put <localsrc> <dst>] [-copyFromLocal <localsrc> <dst>] [-moveFromLocal <localsrc> <dst>]" + 
                     " [-get <src> <localdst>] [-cat <src>] [-copyToLocal <src> <localdst>] [-moveToLocal <src> <localdst>]" +
                     " [-mkdir <path>] [-report] [-setrep [-R] <rep> <path/file>]");
-            return;
+            return -1;
         }
 
-        Configuration conf = new Configuration();
+        // initialize DFSShell
+        init();
+        
+        int exitCode = -1;
         int i = 0;
-        FileSystem fs = FileSystem.parseArgs(argv, i, conf);
         String cmd = argv[i++];
         try {
-            DFSShell tc = new DFSShell(fs);
-
             if ("-put".equals(cmd) || "-copyFromLocal".equals(cmd)) {
-                tc.copyFromLocal(new Path(argv[i++]), argv[i++]);
+                copyFromLocal(new Path(argv[i++]), argv[i++]);
             } else if ("-moveFromLocal".equals(cmd)) {
-                tc.moveFromLocal(new Path(argv[i++]), argv[i++]);
+                moveFromLocal(new Path(argv[i++]), argv[i++]);
             } else if ("-get".equals(cmd) || "-copyToLocal".equals(cmd)) {
-                tc.copyToLocal(argv[i++], new Path(argv[i++]));
+                copyToLocal(argv[i++], new Path(argv[i++]));
             } else if ("-cat".equals(cmd)) {
-                tc.cat(argv[i++]);
+                cat(argv[i++]);
             } else if ("-moveToLocal".equals(cmd)) {
-                tc.moveToLocal(argv[i++], new Path(argv[i++]));
+                moveToLocal(argv[i++], new Path(argv[i++]));
             } else if ("-setrep".equals(cmd)) {
-            		tc.setReplication(argv, i);           
+            	setReplication(argv, i);           
             } else if ("-ls".equals(cmd)) {
                 String arg = i < argv.length ? argv[i++] : "";
-                tc.ls(arg, false);
+                ls(arg, false);
             } else if ("-lsr".equals(cmd)) {
                 String arg = i < argv.length ? argv[i++] : "";
-                tc.ls(arg, true);
+                ls(arg, true);
             } else if ("-mv".equals(cmd)) {
-                tc.rename(argv[i++], argv[i++]);
+                rename(argv[i++], argv[i++]);
             } else if ("-cp".equals(cmd)) {
-                tc.copy(argv[i++], argv[i++], conf);
+                copy(argv[i++], argv[i++], conf);
             } else if ("-rm".equals(cmd)) {
-                tc.delete(argv[i++]);
+                delete(argv[i++]);
             } else if ("-du".equals(cmd)) {
                 String arg = i < argv.length ? argv[i++] : "";
-                tc.du(arg);
+                du(arg);
             } else if ("-mkdir".equals(cmd)) {
-                tc.mkdir(argv[i++]);
+                mkdir(argv[i++]);
             } else if ("-report".equals(cmd)) {
-                tc.report();
+                report();
             }
-            System.exit(0);
+            exitCode = 0;
         } catch (IOException e ) {
           System.err.println( cmd.substring(1) + ": " + e.getLocalizedMessage() );  
-          System.exit(-1);
         } finally {
             fs.close();
         }
+        return exitCode;
     }
 
+    /**
+     * main() runs the shell via ToolBase.doMain().
+     */
+    public static void main(String argv[]) throws IOException {
+        new DFSShell().doMain(new Configuration(), argv);
+    }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java?rev=417298&r1=417297&r2=417298&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java Mon Jun 26 14:11:38 2006
@@ -31,6 +31,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSOutputStream;
 import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.util.ToolBase;
 
 /**
  * This class provides rudimentary checking of DFS volumes for errors and
@@ -56,7 +57,7 @@
  *  
  * @author Andrzej Bialecki
  */
-public class DFSck {
+public class DFSck extends ToolBase {
   private static final Log LOG = LogFactory.getLog(DFSck.class.getName());
 
   /** Don't attempt any fixing . */
@@ -70,11 +71,13 @@
   private UTF8 lostFound = null;
   private boolean lfInited = false;
   private boolean lfInitedOk = false;
-  private Configuration conf;
   private boolean showFiles = false;
   private boolean showBlocks = false;
   private boolean showLocations = false;
   private int fixing;
+ 
+  DFSck() {
+  }
   
   /**
    * Filesystem checker.
@@ -86,16 +89,21 @@
    * @throws Exception
    */
   public DFSck(Configuration conf, int fixing, boolean showFiles, boolean showBlocks, boolean showLocations) throws Exception {
-    this.conf = conf;
-    this.fixing = fixing;
-    this.showFiles = showFiles;
-    this.showBlocks = showBlocks;
-    this.showLocations = showLocations;
-    String fsName = conf.get("fs.default.name", "local");
-    if (fsName.equals("local")) {
-      throw new Exception("This tool only checks DFS, but your config uses 'local' FS.");
-    }
-    this.dfs = new DFSClient(DataNode.createSocketAddr(fsName), conf);
+    setConf(conf);
+    init(fixing, showFiles, showBlocks, showLocations);
+  }
+  
+  public void init(int fixing, boolean showFiles, 
+          boolean showBlocks, boolean showLocations) throws IOException {
+      String fsName = conf.get("fs.default.name", "local");
+      if (fsName.equals("local")) {
+        throw new IOException("This tool only checks DFS, but your config uses 'local' FS.");
+      }
+      this.dfs = new DFSClient(DataNode.createSocketAddr(fsName), conf);
+      this.fixing = fixing;
+      this.showFiles = showFiles;
+      this.showBlocks = showBlocks;
+      this.showLocations = showLocations;
   }
   
   /**
@@ -405,7 +413,7 @@
   /**
    * @param args
    */
-  public static void main(String[] args) throws Exception {
+  public int run(String[] args) throws Exception {
     if (args.length == 0) {
       System.err.println("Usage: DFSck <path> [-move | -delete] [-files] [-blocks [-locations]]");
       System.err.println("\t<path>\tstart checking from this path");
@@ -414,9 +422,8 @@
       System.err.println("\t-files\tprint out files being checked");
       System.err.println("\t-blocks\tprint out block report");
       System.err.println("\t-locations\tprint out locations for every block");
-      return;
+      return -1;
     }
-    Configuration conf = new Configuration();
     String path = args[0];
     boolean showFiles = false;
     boolean showBlocks = false;
@@ -429,8 +436,8 @@
       if (args[i].equals("-move")) fixing = FIXING_MOVE;
       if (args[i].equals("-delete")) fixing = FIXING_DELETE;
     }
-    DFSck fsck = new DFSck(conf, fixing, showFiles, showBlocks, showLocations);
-    Result res = fsck.fsck(path);
+    init(fixing, showFiles, showBlocks, showLocations);
+    Result res = fsck(path);
     System.out.println();
     System.out.println(res);
     if (res.isHealthy()) {
@@ -438,6 +445,11 @@
     } else {
       System.out.println("\n\nThe filesystem under path '" + args[0] + "' is CORRUPT");
     }
+    return 0;
+  }
+
+  public static void main(String[] args) throws Exception {
+      new DFSck().doMain(new Configuration(), args);
   }
 
   /**
@@ -596,5 +608,6 @@
       this.corruptFiles = corruptFiles;
     }
   }
+
 
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=417298&r1=417297&r2=417298&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Mon Jun 26 14:11:38 2006
@@ -34,7 +34,7 @@
  *
  * @author Mike Cafarella
  *******************************************************/
-public class JobClient implements MRConstants {
+public class JobClient extends ToolBase implements MRConstants  {
     private static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.JobClient");
 
     static long MAX_JOBPROFILE_AGE = 1000 * 2;
@@ -170,22 +170,28 @@
     JobSubmissionProtocol jobSubmitClient;
     FileSystem fs = null;
 
-    private Configuration conf;
     static Random r = new Random();
 
     /**
      * Build a job client, connect to the default job tracker
      */
+    public JobClient() {
+    }
+    
     public JobClient(Configuration conf) throws IOException {
-      this.conf = conf;
-      String tracker = conf.get("mapred.job.tracker", "local");
-      if ("local".equals(tracker)) {
-        this.jobSubmitClient = new LocalJobRunner(conf);
-      } else {
-        this.jobSubmitClient = (JobSubmissionProtocol) 
-          RPC.getProxy(JobSubmissionProtocol.class,
-                       JobTracker.getAddress(conf), conf);
-      }
+        setConf(conf);
+        init();
+    }
+    
+    public void init() throws IOException {
+        String tracker = conf.get("mapred.job.tracker", "local");
+        if ("local".equals(tracker)) {
+          this.jobSubmitClient = new LocalJobRunner(conf);
+        } else {
+          this.jobSubmitClient = (JobSubmissionProtocol) 
+            RPC.getProxy(JobSubmissionProtocol.class,
+                         JobTracker.getAddress(conf), conf);
+        }        
     }
   
     /**
@@ -382,14 +388,16 @@
     }
         
 
-    /**
-     */
-    public static void main(String argv[]) throws IOException {
+    public int run(String[] argv) throws Exception {
         if (argv.length < 2) {
             System.out.println("JobClient -submit <job> | -status <id> | -kill <id> [-jt <jobtracker:port>|<config>]");
             System.exit(-1);
         }
 
+        // initialize JobClient
+        init();
+        
         // Process args
         String jobTrackerSpec = null;
         String submitJobFile = null;
@@ -398,10 +406,7 @@
         boolean killJob = false;
 
         for (int i = 0; i < argv.length; i++) {
-            if ("-jt".equals(argv[i])) {
-                jobTrackerSpec = argv[i+1];
-                i++;
-            } else if ("-submit".equals(argv[i])) {
+            if ("-submit".equals(argv[i])) {
                 submitJobFile = argv[i+1];
                 i++;
             } else if ("-status".equals(argv[i])) {
@@ -416,31 +421,40 @@
         }
 
         // Submit the request
-        JobClient jc = new JobClient(getConfiguration(jobTrackerSpec));
+        int exitCode = -1;
         try {
             if (submitJobFile != null) {
-                RunningJob job = jc.submitJob(submitJobFile);
+                RunningJob job = submitJob(submitJobFile);
                 System.out.println("Created job " + job.getJobID());
             } else if (getStatus) {
-                RunningJob job = jc.getJob(jobid);
+                RunningJob job = getJob(jobid);
                 if (job == null) {
                     System.out.println("Could not find job " + jobid);
                 } else {
                     System.out.println();
                     System.out.println(job);
+                    exitCode = 0;
                 }
             } else if (killJob) {
-                RunningJob job = jc.getJob(jobid);
+                RunningJob job = getJob(jobid);
                 if (job == null) {
                     System.out.println("Could not find job " + jobid);
                 } else {
                     job.killJob();
                     System.out.println("Killed job " + jobid);
+                    exitCode = 0;
                 }
             }
         } finally {
-            jc.close();
+            close();
         }
+        return exitCode;
+    }
+    
+    /**
+     * main() runs the job client via ToolBase.doMain().
+     */
+    public static void main(String argv[]) throws IOException {
+        new JobClient().doMain(new Configuration(), argv);
     }
 }
 

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?rev=417298&r1=417297&r2=417298&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Mon Jun 26 14:11:38 2006
@@ -51,15 +51,25 @@
  *
  * @author Milind Bhandarkar
  */
-public class CopyFiles extends MapReduceBase implements Reducer {
+public class CopyFiles extends ToolBase {
   
-  private static final String usage = "distcp <srcurl> <desturl> "+
-          "[-dfs <namenode:port | local> ] [-jt <jobtracker:port | local>] " +
-          "[-config <config-file.xml>]";
+  private static final String usage = "distcp "+
+          "[-fs <namenode:port | local> ] [-jt <jobtracker:port | local>] " +
+          "[-conf <config-file.xml>] " + "[-D <property=value>] "+
+          "<srcurl> <desturl>";
   
   private static final long MIN_BYTES_PER_MAP = 1L << 28;
   private static final int MAX_NUM_MAPS = 10000;
   private static final int MAX_MAPS_PER_NODE = 10;
+  
+  public void setConf(Configuration conf) {
+      if (conf instanceof JobConf) {
+        this.conf = (JobConf) conf;
+      } else {
+        this.conf = new JobConf(conf);
+      }
+  }
+  
   /**
    * Mappper class for Copying files.
    */
@@ -154,11 +164,13 @@
     }
   }
   
-  public void reduce(WritableComparable key,
-      Iterator values,
-      OutputCollector output,
-      Reporter reporter) throws IOException {
-    // nothing
+  public static class CopyFilesReducer extends MapReduceBase implements Reducer {
+      public void reduce(WritableComparable key,
+                         Iterator values,
+                         OutputCollector output,
+                         Reporter reporter) throws IOException {
+          // nothing
+      }
   }
   
   private static String getFileSysName(URI url) {
@@ -208,47 +220,25 @@
    * input files. The mapper actually copies the files allotted to it. And
    * the reduce is empty.
    */
-  public static void main(String[] args) throws IOException {
-
-    Configuration conf = new Configuration();
+  public int run(String[] args) throws IOException {
     String srcPath = null;
     String destPath = null;
     
     for (int idx = 0; idx < args.length; idx++) {
-        if ("-dfs".equals(args[idx])) {
-            if (idx == (args.length-1)) {
-                System.out.println(usage);
-                return;
-            }
-            conf.set("fs.default.name", args[++idx]);
-        } else if ("-jt".equals(args[idx])) {
-            if (idx == (args.length-1)) {
-                System.out.println(usage);
-                return;
-            }
-            conf.set("mapred.job.tracker", args[++idx]);
-        } else if ("-config".equals(args[idx])) {
-            if (idx == (args.length-1)) {
-                System.out.println(usage);
-                return;
-            }
-            conf.addFinalResource(new Path(args[++idx]));
-        } else {
-            if (srcPath == null) {
+        if (srcPath == null) {
                 srcPath = args[idx];
-            } else if (destPath == null) {
+        } else if (destPath == null) {
                 destPath = args[idx];
-            } else {
+        } else {
                 System.out.println(usage);
-                return;
-            }
+                return -1;
         }
     }
     
     // mandatory command-line parameters
     if (srcPath == null || destPath == null) {
         System.out.println(usage);
-        return;
+        return -1;
     }
     
     URI srcurl = null;
@@ -260,7 +250,8 @@
       throw new RuntimeException("URL syntax error.", ex);
     }
     
-    JobConf jobConf = new JobConf(conf, CopyFiles.class);
+    JobConf jobConf = (JobConf)conf;
+    
     jobConf.setJobName("copy-files");
     
     String srcFileSysName = getFileSysName(srcurl);
@@ -270,8 +261,8 @@
     jobConf.set("copy.dest.fs", destFileSysName);
     FileSystem srcfs;
    
-    srcfs = FileSystem.getNamed(srcFileSysName, conf);
-    FileSystem destfs = FileSystem.getNamed(destFileSysName, conf);
+    srcfs = FileSystem.getNamed(srcFileSysName, jobConf);
+    FileSystem destfs = FileSystem.getNamed(destFileSysName, jobConf);
  
     srcPath = srcurl.getPath();
     if ("".equals(srcPath)) { srcPath = "/"; }
@@ -293,7 +284,7 @@
     
     if (!srcfs.exists(tmpPath)) {
       System.out.println(srcPath+" does not exist.");
-      return;
+      return -1;
     }
     
     // turn off speculative execution, because DFS doesn't handle
@@ -308,7 +299,7 @@
     jobConf.setOutputFormat(SequenceFileOutputFormat.class);
     
     jobConf.setMapperClass(CopyFilesMapper.class);
-    jobConf.setReducerClass(CopyFiles.class);
+    jobConf.setReducerClass(CopyFilesReducer.class);
     
     jobConf.setNumReduceTasks(1);
 
@@ -371,11 +362,20 @@
     }
     finalPathList = null;
     
+    int exitCode = -1;
     try {
       JobClient.runJob(jobConf);
+      exitCode = 0;
     } finally {
       fileSys.delete(tmpDir);
     }
   
+    return exitCode;
+  }
+  
+  public static void main(String[] args) throws IOException {
+      new CopyFiles().doMain(
+              new JobConf(new Configuration(), CopyFiles.class), 
+              args);
   }
 }

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java?rev=417298&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java Mon Jun 26 14:11:38 2006
@@ -0,0 +1,35 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import org.apache.hadoop.conf.Configurable;
+
+/**
+ * A tool interface that supports generic options handling.
+ * 
+ * @author hairong
+ *
+ */
+public interface Tool extends Configurable {
+    /**
+     * execute the command with the given arguments
+     * @param args command specific arguments
+     * @return exit code
+     * @throws Exception
+     */
+    int run( String [] args ) throws Exception;
+}

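A tool may also implement Tool directly. A minimal sketch (hypothetical EchoTool class, for illustration only; real commands would extend the ToolBase class added next, which supplies the generic-option parsing):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.Tool;

    public class EchoTool implements Tool {
        private Configuration conf;

        public void setConf(Configuration conf) { this.conf = conf; }
        public Configuration getConf() { return conf; }

        // Echo the command-specific arguments and report success.
        public int run(String[] args) throws Exception {
            for (int i = 0; i < args.length; i++) {
                System.out.println(args[i]);
            }
            return 0;
        }
    }
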
Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java?rev=417298&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java Mon Jun 26 14:11:38 2006
@@ -0,0 +1,191 @@
+/**
+ * Copyright 2005 The Apache Software Foundation
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.util;
+
+import java.io.IOException;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+
+/*************************************************************
+ * This is a base class to support generic command options.
+ * Generic command options allow a user to specify a namenode,
+ * a job tracker, etc. The generic options supported are
+ * -conf <configuration file>     specify an application configuration file
+ * -D <property=value>            use value for given property
+ * -fs <local|namenode:port>      specify a namenode
+ * -jt <local|jobtracker:port>    specify a job tracker
+ * 
+ * The general command line syntax is
+ * bin/hadoop command [genericOptions] [commandOptions]
+ * 
+ * For every tool that inherits from ToolBase, generic options are 
+ * handled by ToolBase while command options are passed to the tool.
+ * Generic options handling is implemented using Commons CLI.
+ * 
+ * Tools that inherit from ToolBase in Hadoop are
+ * DFSShell, DFSck, JobClient, and CopyFiles.
+ * 
+ * Examples using generic options are
+ * bin/hadoop dfs -fs darwin:8020 -ls /data
+ *     list /data directory in dfs with namenode darwin:8020
+ * bin/hadoop dfs -D fs.default.name=darwin:8020 -ls /data
+ *     list /data directory in dfs with namenode darwin:8020
+ * bin/hadoop dfs -conf hadoop-site.xml -ls /data
+ *     list /data directory in dfs with conf specified in hadoop-site.xml
+ * bin/hadoop job -D mapred.job.tracker=darwin:50020 -submit job.xml
+ *     submit a job to job tracker darwin:50020
+ * bin/hadoop job -jt darwin:50020 -submit job.xml
+ *     submit a job to job tracker darwin:50020
+ * bin/hadoop job -jt local -submit job.xml
+ *     submit a job to local runner
+ *        
+ * @author hairong
+ *
+ */
+public abstract class ToolBase implements Tool {
+    public static final Log LOG = LogFactory.getLog(
+            "org.apache.hadoop.util.ToolBase");
+    public Configuration conf;
+
+    public void setConf(Configuration conf) {
+        this.conf = conf;
+    }
+
+    public Configuration getConf() {
+        return conf;
+    }
+    
+    /*
+     * Specify properties of each generic option
+     */
+    static private Options buildGeneralOptions() {
+        Option fs = OptionBuilder.withArgName("local|namenode:port")
+                                 .hasArg()
+                                 .withDescription("specify a namenode")
+                                 .create("fs");
+        Option jt = OptionBuilder.withArgName("local|jobtracker:port")
+                                 .hasArg()
+                                 .withDescription("specify a job tracker")
+                                 .create("jt");
+        Option oconf = OptionBuilder.withArgName("configuration file")
+                .hasArg()
+                .withDescription("specify an application configuration file" )
+                .create("conf");
+        Option property = OptionBuilder.withArgName("property=value")
+                              .hasArgs()
+                              .withArgPattern("=", 1)
+                              .withDescription("use value for given property")
+                              .create('D');
+        Options opts = new Options();
+        opts.addOption(fs);
+        opts.addOption(jt);
+        opts.addOption(oconf);
+        opts.addOption(property);
+        
+        return opts;
+    }
+    
+    /*
+     * Modify configuration according user-specified generic options
+     * @param conf Configuration to be modified
+     * @param line User-specified generic options
+     */
+    static private void processGeneralOptions( Configuration conf,
+                                               CommandLine line ) {
+        if(line.hasOption("fs")) {
+            conf.set("fs.default.name", line.getOptionValue("fs"));
+        }
+        
+        if(line.hasOption("jt")) {
+            conf.set("mapred.job.tracker", line.getOptionValue("jt"));
+        }
+        if(line.hasOption("conf")) {
+            conf.addFinalResource(new Path(line.getOptionValue("conf")));
+        }
+        if(line.hasOption('D')) {
+            String[] property = line.getOptionValues('D');
+            for(int i=0; i<property.length-1; i=i+2) {
+                if(property[i]!=null)
+                    conf.set(property[i], property[i+1]);
+            }
+         }           
+    }
+ 
+    /**
+     * Parse the user-specified options, get the generic options, and modify
+     * configuration accordingly
+     * @param conf Configuration to be modified
+     * @param args User-specified arguments
+     * @return Command-specific arguments
+     */
+    static private String[] parseGeneralOptions( Configuration conf, 
+                 String[] args ) {
+        Options opts = buildGeneralOptions();
+        CommandLineParser parser = new GnuParser();
+        try {
+          CommandLine line = parser.parse( opts, args, true );
+          processGeneralOptions( conf, line );
+          return line.getArgs();
+        } catch(ParseException e) {
+          LOG.warn("options parsing failed: "+e.getMessage());
+
+          HelpFormatter formatter = new HelpFormatter();
+          formatter.printHelp("general options are: ", opts);
+        }
+        return args;
+    }
+
+    /**
+     * Execute a command
+     * @param conf Application default configuration
+     * @param args User-specified arguments
+     * @return Exit code
+     * @throws Exception
+     */
+    public int executeCommand(Configuration conf, String[] args) throws Exception {
+        String [] commandOptions = parseGeneralOptions(conf, args);
+        setConf(conf);
+        return this.run(commandOptions);
+    }
+    /**
+     * Work as a main program: execute a command and handle any exceptions
+     * @param conf Application default configuration
+     * @param args User-specified arguments
+     */
+    public final void doMain(Configuration conf, String[] args) {
+        try {
+            System.exit(executeCommand(conf, args));
+        }
+        catch (Exception e) {
+            LOG.warn(e.getMessage());
+            e.printStackTrace();
+            System.exit(-1);
+        }
+    }
+
+}
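
A tool can also be driven programmatically via executeCommand(), which parses the generic options and returns the exit code rather than calling System.exit(). A minimal sketch (hypothetical RunEmbedded class; darwin:8020 follows the examples in the class comment above):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.ToolBase;

    public class RunEmbedded {
        public static void main(String[] args) throws Exception {
            ToolBase tool = new ToolBase() {
                public int run(String[] argv) throws Exception {
                    // By the time run() is called, conf already reflects
                    // any generic options.
                    System.out.println("fs.default.name = " +
                                       conf.get("fs.default.name", "local"));
                    return 0;
                }
            };
            // -fs is consumed as a generic option, so run() receives
            // only {"-ls", "/data"}.
            int exitCode = tool.executeCommand(new Configuration(),
                    new String[] {"-fs", "darwin:8020", "-ls", "/data"});
            System.out.println("exit code: " + exitCode);
        }
    }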