Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/08/20 22:41:12 UTC

svn commit: r567824 - in /lucene/hadoop/trunk: ./ src/contrib/streaming/src/test/org/apache/hadoop/streaming/ src/java/org/apache/hadoop/conf/ src/java/org/apache/hadoop/dfs/ src/java/org/apache/hadoop/fs/ src/java/org/apache/hadoop/fs/s3/ src/java/org...

Author: cutting
Date: Mon Aug 20 13:41:07 2007
New Revision: 567824

URL: http://svn.apache.org/viewvc?rev=567824&view=rev
Log:
HADOOP-1436.  Improved command line APIs.  Contributed by Enis Soztutar.

Added:
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configured.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java
    lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFsck.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
    lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java

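For context, a minimal sketch of the pattern this change enables (illustrative only, not part of the commit; MyTool is a hypothetical class): a tool now extends Configured and implements Tool, and its main() delegates generic-option handling to ToolRunner instead of subclassing ToolBase.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.conf.Configured;
    import org.apache.hadoop.util.Tool;
    import org.apache.hadoop.util.ToolRunner;

    public class MyTool extends Configured implements Tool {
      public int run(String[] args) throws Exception {
        // By the time run() is called, the generic options
        // (-fs, -jt, -conf, -D) have already been applied to getConf().
        System.out.println("fs.default.name = "
            + getConf().get("fs.default.name"));
        return 0;
      }

      public static void main(String[] args) throws Exception {
        // ToolRunner parses the generic options, sets the Configuration
        // on the tool, then invokes run() with the remaining arguments.
        System.exit(ToolRunner.run(new Configuration(), new MyTool(), args));
      }
    }
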
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Aug 20 13:41:07 2007
@@ -61,6 +61,10 @@
     HADOOP-1231.  Add generics to Mapper and Reducer interfaces.
     (tomwhite via cutting)
 
+    HADOOP-1436.  Improved command-line APIs, so that tools need
+    not subclass ToolBase, and the generic parameter parser is public.
+    (Enis Soztutar via cutting)
+
 
 Release 0.14.0 - 2007-08-17
 

Modified: lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java (original)
+++ lucene/hadoop/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestStreamedMerge.java Mon Aug 20 13:41:07 2007
@@ -22,7 +22,6 @@
 import java.io.File;
 import java.io.IOException;
 import java.io.InputStream;
-import java.io.LineNumberInputStream;
 import java.io.OutputStream;
 import java.net.ServerSocket;
 import java.net.Socket;
@@ -32,10 +31,11 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This JUnit test is not pure-Java and is not run as 
@@ -105,7 +105,7 @@
   void lsr() {
     try {
       System.out.println("lsr /");
-      new FsShell().doMain(conf_, new String[]{ "-lsr", "/" });
+      ToolRunner.run(conf_, new FsShell(), new String[]{ "-lsr", "/" });
     } catch (Exception e) {
       e.printStackTrace();
     }
@@ -156,6 +156,7 @@
         Socket client;
         InputStream in;
       
+        @Override
         InputStream connectInputStream() throws IOException {
           listen = new ServerSocket(SOC_PORT);
           client = listen.accept();
@@ -163,6 +164,7 @@
           return in;
         }
       
+        @Override
         void close() throws IOException
         {
           listen.close();
@@ -184,6 +186,7 @@
     
     abstract void close() throws IOException;
     
+    @Override
     public void run() {
       try {
         in_ = connectInputStream();

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configured.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configured.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configured.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/conf/Configured.java Mon Aug 20 13:41:07 2007
@@ -24,6 +24,11 @@
   private Configuration conf;
 
   /** Construct a Configured. */
+  public Configured() {
+    this(null);
+  }
+  
+  /** Construct a Configured. */
   public Configured(Configuration conf) {
     setConf(conf);
   }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSAdmin.java Mon Aug 20 13:41:07 2007
@@ -19,12 +19,13 @@
 
 import java.io.IOException;
 
-import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RemoteException;
-import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.dfs.DistributedFileSystem.DiskStatus;
 import org.apache.hadoop.dfs.FSConstants.UpgradeAction;
+import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This class provides some DFS administrative access.
@@ -35,10 +36,17 @@
    * Construct a DFSAdmin object.
    */
   public DFSAdmin() {
-    super();
+    this(null);
   }
 
   /**
+   * Construct a DFSAdmin object.
+   */
+  public DFSAdmin(Configuration conf) {
+    super(conf);
+  }
+  
+  /**
    * Gives a report on how the FileSystem is doing.
    * @exception IOException if the filesystem does not exist.
    */
@@ -325,6 +333,7 @@
    * @exception Exception if the filesystem does not exist.
    * @return 0 on success, non zero on error.
    */
+  @Override
   public int run(String[] argv) throws Exception {
 
     if (argv.length < 1) {
@@ -442,7 +451,7 @@
    * @exception Exception if the filesystem does not exist.
    */
   public static void main(String[] argv) throws Exception {
-    int res = new DFSAdmin().doMain(new Configuration(), argv);
+    int res = ToolRunner.run(new DFSAdmin(), argv);
     System.exit(res);
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/dfs/DFSck.java Mon Aug 20 13:41:07 2007
@@ -25,10 +25,12 @@
 import java.net.URLConnection;
 import java.net.URLEncoder;
 
-import org.apache.commons.logging.*;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.ToolBase;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This class provides rudimentary checking of DFS volumes for errors and
@@ -53,7 +55,7 @@
  *  factors of each file.
  *  
  */
-public class DFSck extends ToolBase {
+public class DFSck extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(DFSck.class.getName());
 
   DFSck() {}
@@ -64,13 +66,13 @@
    * @throws Exception
    */
   public DFSck(Configuration conf) throws Exception {
-    setConf(conf);
+    super(conf);
   }
   
   private String getInfoServer() throws IOException {
     InetSocketAddress addr = 
-      DataNode.createSocketAddr(conf.get("fs.default.name"));
-    int infoPort = conf.getInt("dfs.info.port", 50070);
+      DataNode.createSocketAddr(getConf().get("fs.default.name"));
+    int infoPort = getConf().getInt("dfs.info.port", 50070);
     return addr.getHostName() + ":" + infoPort;
   }
   
@@ -121,7 +123,7 @@
   }
 
   public static void main(String[] args) throws Exception {
-    int res = new DFSck().doMain(new Configuration(), args);
+    int res = ToolRunner.run(new DFSck(new Configuration()), args);
     System.exit(res);
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/FsShell.java Mon Aug 20 13:41:07 2007
@@ -18,16 +18,19 @@
 package org.apache.hadoop.fs;
 
 import java.io.*;
-import java.util.*;
 import java.text.DecimalFormat;
 import java.text.SimpleDateFormat;
+import java.util.*;
 
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.ipc.*;
-import org.apache.hadoop.util.ToolBase;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /** Provide command line access to a FileSystem. */
-public class FsShell extends ToolBase {
+public class FsShell extends Configured implements Tool {
 
   protected FileSystem fs;
   private Trash trash;
@@ -45,17 +48,22 @@
   /**
    */
   public FsShell() {
+    this(null);
+  }
+
+  public FsShell(Configuration conf) {
+    super(conf);
     fs = null;
     trash = null;
   }
-
+  
   protected void init() throws IOException {
-    conf.setQuietMode(true);
+    getConf().setQuietMode(true);
     if (this.fs == null) {
-      this.fs = FileSystem.get(conf);
+      this.fs = FileSystem.get(getConf());
     }
     if (this.trash == null) {
-      this.trash = new Trash(conf);
+      this.trash = new Trash(getConf());
     }
   }
 
@@ -64,7 +72,7 @@
    */
   private void copyBytes(InputStream in, OutputStream out) throws IOException {
     PrintStream ps = out instanceof PrintStream ? (PrintStream)out : null;
-    byte buf[] = new byte[conf.getInt("io.file.buffer.size", 4096)];
+    byte buf[] = new byte[getConf().getInt("io.file.buffer.size", 4096)];
     int bytesRead = in.read(buf);
     while (bytesRead >= 0) {
       out.write(buf, 0, bytesRead);
@@ -268,10 +276,10 @@
     for(int i=0; i<srcs.length; i++) {
       if (endline) {
         FileUtil.copyMerge(fs, srcs[i], 
-                           FileSystem.getLocal(conf), dst, false, conf, "\n");
+                           FileSystem.getLocal(getConf()), dst, false, getConf(), "\n");
       } else {
         FileUtil.copyMerge(fs, srcs[i], 
-                           FileSystem.getLocal(conf), dst, false, conf, null);
+                           FileSystem.getLocal(getConf()), dst, false, getConf(), null);
       }
     }
   }      
@@ -301,6 +309,7 @@
     //  zzz
 
     new DelayedExceptionThrowing() {
+      @Override
       void process(Path p) throws IOException {
         printToStdout(p);
       }
@@ -851,6 +860,7 @@
     //  rm: cannot remove `y.txt': No such file or directory
 
     new DelayedExceptionThrowing() {
+      @Override
       void process(Path p) throws IOException {
         delete(p, recursive);
       }
@@ -1321,49 +1331,49 @@
         else
           copyMergeToLocal(argv[i++], new Path(argv[i++]));
       } else if ("-cat".equals(cmd)) {
-        exitCode = doall(cmd, argv, conf, i);
+        exitCode = doall(cmd, argv, getConf(), i);
       } else if ("-moveToLocal".equals(cmd)) {
         moveToLocal(argv[i++], new Path(argv[i++]));
       } else if ("-setrep".equals(cmd)) {
         setReplication(argv, i);           
       } else if ("-ls".equals(cmd)) {
         if (i < argv.length) {
-          exitCode = doall(cmd, argv, conf, i);
+          exitCode = doall(cmd, argv, getConf(), i);
         } else {
           ls(Path.CUR_DIR, false);
         } 
       } else if ("-lsr".equals(cmd)) {
         if (i < argv.length) {
-          exitCode = doall(cmd, argv, conf, i);
+          exitCode = doall(cmd, argv, getConf(), i);
         } else {
           ls(Path.CUR_DIR, true);
         } 
       } else if ("-mv".equals(cmd)) {
-        exitCode = rename(argv, conf);
+        exitCode = rename(argv, getConf());
       } else if ("-cp".equals(cmd)) {
-        exitCode = copy(argv, conf);
+        exitCode = copy(argv, getConf());
       } else if ("-rm".equals(cmd)) {
-        exitCode = doall(cmd, argv, conf, i);
+        exitCode = doall(cmd, argv, getConf(), i);
       } else if ("-rmr".equals(cmd)) {
-        exitCode = doall(cmd, argv, conf, i);
+        exitCode = doall(cmd, argv, getConf(), i);
       } else if ("-expunge".equals(cmd)) {
         expunge();
       } else if ("-du".equals(cmd)) {
         if (i < argv.length) {
-          exitCode = doall(cmd, argv, conf, i);
+          exitCode = doall(cmd, argv, getConf(), i);
         } else {
           du("");
         }
       } else if ("-dus".equals(cmd)) {
         if (i < argv.length) {
-          exitCode = doall(cmd, argv, conf, i);
+          exitCode = doall(cmd, argv, getConf(), i);
         } else {
           dus("");
         }         
       } else if ("-mkdir".equals(cmd)) {
-        exitCode = doall(cmd, argv, conf, i);
+        exitCode = doall(cmd, argv, getConf(), i);
       } else if ("-touchz".equals(cmd)) {
-        exitCode = doall(cmd, argv, conf, i);
+        exitCode = doall(cmd, argv, getConf(), i);
       } else if ("-test".equals(cmd)) {
         exitCode = test(argv, i);
       } else if ("-stat".equals(cmd)) {
@@ -1426,7 +1436,7 @@
     FsShell shell = new FsShell();
     int res;
     try {
-      res = shell.doMain(new Configuration(), argv);
+      res = ToolRunner.run(shell, argv);
     } finally {
       shell.close();
     }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/fs/s3/MigrationTool.java Mon Aug 20 13:41:07 2007
@@ -26,9 +26,10 @@
 import java.util.Set;
 import java.util.TreeSet;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.util.ToolBase;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 import org.jets3t.service.S3Service;
 import org.jets3t.service.S3ServiceException;
 import org.jets3t.service.impl.rest.httpclient.RestS3Service;
@@ -46,13 +47,13 @@
  * - no datafiles are touched.
  * </p>
  */
-public class MigrationTool extends ToolBase {
+public class MigrationTool extends Configured implements Tool {
   
   private S3Service s3Service;
   private S3Bucket bucket;
   
   public static void main(String[] args) throws Exception {
-    int res = new MigrationTool().doMain(new Configuration(), args);
+    int res = ToolRunner.run(new MigrationTool(), args);
     System.exit(res);
   }
   
@@ -66,10 +67,10 @@
     
     URI uri = URI.create(args[0]);
     
-    initialize(uri, conf);
+    initialize(uri);
     
     FileSystemStore newStore = new Jets3tFileSystemStore();
-    newStore.initialize(uri, conf);
+    newStore.initialize(uri, getConf());
     
     if (get("%2F") != null) { 
       System.err.println("Current version number is [unversioned].");
@@ -105,9 +106,9 @@
     
   }
   
-  public void initialize(URI uri, Configuration conf) throws IOException {
+  public void initialize(URI uri) throws IOException {
+    
     
-    this.conf = conf;
     
     try {
       String accessKey = null;
@@ -123,10 +124,10 @@
         }
       }
       if (accessKey == null) {
-        accessKey = conf.get("fs.s3.awsAccessKeyId");
+        accessKey = getConf().get("fs.s3.awsAccessKeyId");
       }
       if (secretAccessKey == null) {
-        secretAccessKey = conf.get("fs.s3.awsSecretAccessKey");
+        secretAccessKey = getConf().get("fs.s3.awsSecretAccessKey");
       }
       if (accessKey == null && secretAccessKey == null) {
         throw new IllegalArgumentException("AWS " +

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/mapred/JobClient.java Mon Aug 20 13:41:07 2007
@@ -17,18 +17,46 @@
  */
 package org.apache.hadoop.mapred;
 
-import org.apache.commons.logging.*;
-
-import org.apache.hadoop.fs.*;
-import org.apache.hadoop.io.*;
-import org.apache.hadoop.io.retry.*;
-import org.apache.hadoop.ipc.*;
-import org.apache.hadoop.conf.*;
-import org.apache.hadoop.util.*;
-import org.apache.hadoop.filecache.*;
-import java.io.*;
-import java.net.*;
-import java.util.*;
+import java.io.BufferedReader;
+import java.io.BufferedWriter;
+import java.io.DataInput;
+import java.io.DataOutput;
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.io.InputStreamReader;
+import java.io.OutputStream;
+import java.io.OutputStreamWriter;
+import java.net.InetSocketAddress;
+import java.net.SocketTimeoutException;
+import java.net.URI;
+import java.net.URL;
+import java.net.URLConnection;
+import java.util.Arrays;
+import java.util.Comparator;
+import java.util.HashMap;
+import java.util.Map;
+import java.util.Random;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.filecache.DistributedCache;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.io.DataOutputBuffer;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableUtils;
+import org.apache.hadoop.io.retry.RetryPolicies;
+import org.apache.hadoop.io.retry.RetryPolicy;
+import org.apache.hadoop.io.retry.RetryProxy;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Tool;
+import org.apache.hadoop.util.ToolRunner;
 
 /*******************************************************
  * JobClient interacts with the JobTracker network interface.
@@ -37,7 +65,7 @@
  * with the networked job system.
  *
  *******************************************************/
-public class JobClient extends ToolBase implements MRConstants  {
+public class JobClient extends Configured implements MRConstants, Tool  {
   private static final Log LOG = LogFactory.getLog("org.apache.hadoop.mapred.JobClient");
   public static enum TaskStatusFilter { NONE, FAILED, SUCCEEDED, ALL }
   private TaskStatusFilter taskOutputFilter = TaskStatusFilter.FAILED; 
@@ -171,6 +199,7 @@
     /**
      * Dump stats to screen
      */
+    @Override
     public String toString() {
       try {
         ensureFreshStatus();
@@ -261,7 +290,7 @@
   public synchronized FileSystem getFs() throws IOException {
     if (this.fs == null) {
       String fsName = jobSubmitClient.getFilesystemName();
-      this.fs = FileSystem.getNamed(fsName, this.conf);
+      this.fs = FileSystem.getNamed(fsName, getConf());
     }
     return fs;
   }
@@ -824,7 +853,7 @@
   /**
    */
   public static void main(String argv[]) throws Exception {
-    int res = new JobClient().doMain(new Configuration(), argv);
+    int res = ToolRunner.run(new JobClient(), argv);
     System.exit(res);
   }
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Mon Aug 20 13:41:07 2007
@@ -60,7 +60,7 @@
  * A Map-reduce program to recursively copy directories between
  * different file-systems.
  */
-public class CopyFiles extends ToolBase {
+public class CopyFiles implements Tool {
   private static final String HDFS = "hdfs";
   private static final String S3 = "s3";
   
@@ -74,6 +74,8 @@
   private static final String readFailuresAttribute = 
     "distcp.ignore.read.failures";
   
+  private JobConf conf;
+  
   public void setConf(Configuration conf) {
     if (conf instanceof JobConf) {
       this.conf = (JobConf) conf;
@@ -82,6 +84,17 @@
     }
   }
   
+  public Configuration getConf() {
+    return conf;
+  }
+  
+  public CopyFiles() {
+  }
+  
+  public CopyFiles(Configuration conf) {
+    setConf(conf);
+  }
+  
   /**
    * Base-class for all mappers for distcp
    */
@@ -250,6 +263,7 @@
      * @param logPath : The log Path.
      * @param ignoreReadFailures : Ignore read failures?
      */
+    @Override
     public void setup(Configuration conf, JobConf jobConf, 
                       String[] srcPaths, String destPath, 
                       Path logPath, boolean ignoreReadFailures) 
@@ -354,6 +368,7 @@
       
     }
     
+    @Override
     public void cleanup(Configuration conf, JobConf jobConf, 
                         String srcPath, String destPath) 
       throws IOException
@@ -372,6 +387,7 @@
      * top-level paths on source and destination directories.
      * Gets the named file systems, to be used later in map.
      */
+    @Override
     public void configure(JobConf job) 
     {
       String srcfs = job.get("copy.src.fs", "local");
@@ -421,6 +437,7 @@
       }
     }
     
+    @Override
     public void close() {
       // nothing
     }
@@ -445,6 +462,7 @@
      * @param logPath : The log Path.
      * @param ignoreReadFailures : Ignore read failures?
      */
+    @Override
     public void setup(Configuration conf, JobConf jobConf, 
                       String[] srcPaths, String destPath, 
                       Path logPath, boolean ignoreReadFailures) 
@@ -491,6 +509,7 @@
       }
     }	
     
+    @Override
     public void cleanup(Configuration conf, JobConf jobConf, 
                         String srcPath, String destPath) 
       throws IOException
@@ -504,6 +523,7 @@
       }
     }
     
+    @Override
     public void configure(JobConf job)
     {
       //Save jobConf
@@ -861,9 +881,10 @@
   }
   
   public static void main(String[] args) throws Exception {
-    int res = new CopyFiles().doMain(
-                                     new JobConf(new Configuration(), CopyFiles.class), 
-                                     args);
+    JobConf job = new JobConf(new Configuration(), CopyFiles.class);
+    CopyFiles distcp = new CopyFiles();
+    distcp.setConf(job);
+    int res = ToolRunner.run(distcp, args);
     System.exit(res);
   }
 }

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java?rev=567824&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/GenericOptionsParser.java Mon Aug 20 13:41:07 2007
@@ -0,0 +1,213 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import org.apache.commons.cli.CommandLine;
+import org.apache.commons.cli.CommandLineParser;
+import org.apache.commons.cli.GnuParser;
+import org.apache.commons.cli.HelpFormatter;
+import org.apache.commons.cli.Option;
+import org.apache.commons.cli.OptionBuilder;
+import org.apache.commons.cli.Options;
+import org.apache.commons.cli.ParseException;
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+
+/*************************************************************
+ * This class can be used as a utility to parse command line
+ * arguments generic to the Hadoop framework. This class 
+ * recognizes several command line arguments, which allow a user 
+ * to specify a namenode, a job tracker etc. Generic options 
+ * supported are 
+ * <p>-conf <configuration file>     specify an application configuration file
+ * <p>-D <property=value>            use value for given property
+ * <p>-fs <local|namenode:port>      specify a namenode
+ * <p>-jt <local|jobtracker:port>    specify a job tracker
+ * <br>
+ * <p>The general command line syntax is
+ * <p>bin/hadoop command [genericOptions] [commandOptions]
+ * <br>
+ * Generic command line arguments <strong>might</strong> modify 
+ * the <code>Configuration</code> objects given to the constructors.
+ * <br><br>
+ * The functionality is implemented using Commons CLI.
+ * <br>
+ * <p>Examples using generic options are
+ * <p>bin/hadoop dfs -fs darwin:8020 -ls /data
+ * <p><blockquote><pre>
+ *     list /data directory in dfs with namenode darwin:8020
+ * </pre></blockquote>
+ * <p>bin/hadoop dfs -D fs.default.name=darwin:8020 -ls /data
+ * <p><blockquote><pre>
+ *     list /data directory in dfs with namenode darwin:8020
+ * </pre></blockquote>
+ * <p>bin/hadoop dfs -conf hadoop-site.xml -ls /data
+ * <p><blockquote><pre>
+ *     list /data directory in dfs with conf specified in hadoop-site.xml
+ * </pre></blockquote>
+ * <p>bin/hadoop job -D mapred.job.tracker=darwin:50020 -submit job.xml
+ * <p><blockquote><pre>
+ *     submit a job to job tracker darwin:50020
+ * </pre></blockquote>
+ * <p>bin/hadoop job -jt darwin:50020 -submit job.xml
+ * <p><blockquote><pre>
+ *     submit a job to job tracker darwin:50020
+ * </pre></blockquote>
+ * <p>bin/hadoop job -jt local -submit job.xml
+ * <p><blockquote><pre>
+ *     submit a job to local runner
+ * </pre></blockquote>
+ *
+ * @see Tool
+ * @see ToolRunner
+ */
+public class GenericOptionsParser {
+
+  private static final Log LOG = LogFactory.getLog(GenericOptionsParser.class);
+
+  private CommandLine commandLine;
+
+  /** Instantiates a GenericOptionsParser to parse only
+   * the generic Hadoop arguments. The array of string arguments 
+   * other than the generic arguments can be obtained by 
+   * {@link #getRemainingArgs()}
+   * @param conf the configuration to modify
+   * @param args User-specified arguments
+   */
+  public GenericOptionsParser(Configuration conf, String[] args) {
+    this(conf, new Options(), args); 
+  }
+
+  /** 
+   * Instantiates a GenericOptionsParser to parse the given options 
+   * as well as generic Hadoop options. The resulting <code>
+   * CommandLine</code> object can be obtained by {@link #getCommandLine()}
+   * @param conf the configuration to modify  
+   * @param options options built by the caller 
+   * @param args User-specified arguments
+   */
+  public GenericOptionsParser(Configuration conf, Options options, String[] args) {
+    parseGeneralOptions(options, conf, args);
+  }
+
+  /**
+   * Returns an array of Strings containing only command-specific 
+   * arguments.
+   * @return String array of remaining arguments not parsed
+   */
+  public String[] getRemainingArgs() {
+    return commandLine.getArgs();
+  }
+
+  /**
+   * Returns the commons-cli <code>CommandLine</code> object 
+   * to process the parsed arguments. Note: if the object is 
+   * created with <code>GenericOptionsParser(Configuration, String[])</code>, 
+   * then the returned object will only contain the parsed generic 
+   * options.
+   * @return CommandLine object representing list of arguments 
+   * parsed against Options descriptor.
+   */
+  public CommandLine getCommandLine() {
+    return commandLine;
+  }
+
+  /**
+   * Specify properties of each generic option
+   */
+  @SuppressWarnings("static-access")
+  private Options buildGeneralOptions(Options opts) {
+    Option fs = OptionBuilder.withArgName("local|namenode:port")
+    .hasArg()
+    .withDescription("specify a namenode")
+    .create("fs");
+    Option jt = OptionBuilder.withArgName("local|jobtracker:port")
+    .hasArg()
+    .withDescription("specify a job tracker")
+    .create("jt");
+    Option oconf = OptionBuilder.withArgName("configuration file")
+    .hasArg()
+    .withDescription("specify an application configuration file")
+    .create("conf");
+    Option property = OptionBuilder.withArgName("property=value")
+    .hasArgs()
+    .withArgPattern("=", 1)
+    .withDescription("use value for given property")
+    .create('D');
+
+    opts.addOption(fs);
+    opts.addOption(jt);
+    opts.addOption(oconf);
+    opts.addOption(property);
+    
+    return opts;
+  }
+
+  /**
+   * Modify configuration according to user-specified generic options
+   * @param conf Configuration to be modified
+   * @param line User-specified generic options
+   */
+  private void processGeneralOptions(Configuration conf,
+      CommandLine line) {
+    if (line.hasOption("fs")) {
+      conf.set("fs.default.name", line.getOptionValue("fs"));
+    }
+
+    if (line.hasOption("jt")) {
+      conf.set("mapred.job.tracker", line.getOptionValue("jt"));
+    }
+    if (line.hasOption("conf")) {
+      conf.addFinalResource(new Path(line.getOptionValue("conf")));
+    }
+    if (line.hasOption('D')) {
+      String[] property = line.getOptionValues('D');
+      for(int i=0; i<property.length-1; i=i+2) {
+        if (property[i]!=null)
+          conf.set(property[i], property[i+1]);
+      }
+    }
+  }
+
+  /**
+   * Parse the user-specified options, get the generic options, and modify
+   * configuration accordingly
+   * @param conf Configuration to be modified
+   * @param args User-specified arguments
+   * @return Command-specific arguments
+   */
+  private String[] parseGeneralOptions(Options opts, Configuration conf, 
+      String[] args) {
+    opts = buildGeneralOptions(opts);
+    CommandLineParser parser = new GnuParser();
+    try {
+      commandLine = parser.parse(opts, args, true);
+      processGeneralOptions(conf, commandLine);
+      return commandLine.getArgs();
+    } catch(ParseException e) {
+      LOG.warn("options parsing failed: "+e.getMessage());
+
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp("general options are: ", opts);
+    }
+    return args;
+  }
+
+}

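A usage sketch (not part of this commit; args is assumed to be the program's command-line arguments): GenericOptionsParser can also be used directly by programs that do not implement Tool.

    Configuration conf = new Configuration();
    // Strips -fs/-jt/-conf/-D from args and applies them to conf.
    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
    // Command-specific arguments, with the generic options removed.
    String[] remaining = parser.getRemainingArgs();
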
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java Mon Aug 20 13:41:07 2007
@@ -26,7 +26,7 @@
  */
 public interface Tool extends Configurable {
   /**
-   * execute the command with the given arguments
+   * Execute the command with the given arguments.
    * @param args command specific arguments
    * @return exit code
    * @throws Exception

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java Mon Aug 20 13:41:07 2007
@@ -18,70 +18,19 @@
 
 package org.apache.hadoop.util;
 
-import java.io.IOException;
-
-import org.apache.commons.cli.CommandLine;
-import org.apache.commons.cli.CommandLineParser;
-import org.apache.commons.cli.GnuParser;
-import org.apache.commons.cli.HelpFormatter;
-import org.apache.commons.cli.Option;
-import org.apache.commons.cli.OptionBuilder;
-import org.apache.commons.cli.Options;
-import org.apache.commons.cli.ParseException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.Path;
 
 /*************************************************************
- * This is a base class to support generic commonad options.
- * Generic command options allow a user to specify a namenode,
- * a job tracker etc. Generic options supported are 
- * <p>-conf <configuration file>     specify an application configuration file
- * <p>-D <property=value>            use value for given property
- * <p>-fs <local|namenode:port>      specify a namenode
- * <p>-jt <local|jobtracker:port>    specify a job tracker
- * <br>
- * <p>The general command line syntax is
- * <p>bin/hadoop command [genericOptions] [commandOptions]
- * 
- * <p>For every tool that inherits from ToolBase, generic options are 
- * handled by ToolBase while command options are passed to the tool.
- * Generic options handling is implemented using Common CLI.
- * 
- * <p>Tools that inherit from ToolBase in Hadoop are
- * DFSShell, DFSck, JobClient, and CopyFiles.
- * <br>
- * <p>Examples using generic options are
- * <p>bin/hadoop dfs -fs darwin:8020 -ls /data
- * <p><blockquote><pre>
- *     list /data directory in dfs with namenode darwin:8020
- * </pre></blockquote>
- * <p>bin/hadoop dfs -D fs.default.name=darwin:8020 -ls /data
- * <p><blockquote><pre>
- *     list /data directory in dfs with namenode darwin:8020
- * </pre></blockquote>
- * <p>bin/hadoop dfs -conf hadoop-site.xml -ls /data
- * <p><blockquote><pre>
- *     list /data directory in dfs with conf specified in hadoop-site.xml
- * </pre></blockquote>
- * <p>bin/hadoop job -D mapred.job.tracker=darwin:50020 -submit job.xml
- * <p><blockquote><pre>
- *     submit a job to job tracker darwin:50020
- * </pre></blockquote>
- * <p>bin/hadoop job -jt darwin:50020 -submit job.xml
- * <p><blockquote><pre>
- *     submit a job to job tracker darwin:50020
- * </pre></blockquote>
- * <p>bin/hadoop job -jt local -submit job.xml
- * <p><blockquote><pre>
- *     submit a job to local runner
- * </pre></blockquote>
- *        
+ * @deprecated This class is deprecated. Classes 
+ * extending ToolBase should instead implement the {@link Tool} 
+ * interface, and use {@link ToolRunner} for execution 
+ * functionality. Alternatively, {@link GenericOptionsParser} 
+ * can be used to parse generic arguments related to the 
+ * Hadoop framework. 
  */
+@Deprecated
 public abstract class ToolBase implements Tool {
-  private static final Log LOG = LogFactory.getLog(
-                                                   "org.apache.hadoop.util.ToolBase");
+  
   public Configuration conf;
 
   public void setConf(Configuration conf) {
@@ -91,87 +40,7 @@
   public Configuration getConf() {
     return conf;
   }
-    
-  /*
-   * Specify properties of each generic option
-   */
-  static private Options buildGeneralOptions() {
-    Option fs = OptionBuilder.withArgName("local|namenode:port")
-      .hasArg()
-      .withDescription("specify a namenode")
-      .create("fs");
-    Option jt = OptionBuilder.withArgName("local|jobtracker:port")
-      .hasArg()
-      .withDescription("specify a job tracker")
-      .create("jt");
-    Option oconf = OptionBuilder.withArgName("configuration file")
-      .hasArg()
-      .withDescription("specify an application configuration file")
-      .create("conf");
-    Option property = OptionBuilder.withArgName("property=value")
-      .hasArgs()
-      .withArgPattern("=", 1)
-      .withDescription("use value for given property")
-      .create('D');
-    Options opts = new Options();
-    opts.addOption(fs);
-    opts.addOption(jt);
-    opts.addOption(oconf);
-    opts.addOption(property);
-        
-    return opts;
-  }
-    
-  /*
-   * Modify configuration according user-specified generic options
-   * @param conf Configuration to be modified
-   * @param line User-specified generic options
-   */
-  static private void processGeneralOptions(Configuration conf,
-                                            CommandLine line) {
-    if (line.hasOption("fs")) {
-      conf.set("fs.default.name", line.getOptionValue("fs"));
-    }
-        
-    if (line.hasOption("jt")) {
-      conf.set("mapred.job.tracker", line.getOptionValue("jt"));
-    }
-    if (line.hasOption("conf")) {
-      conf.addFinalResource(new Path(line.getOptionValue("conf")));
-    }
-    if (line.hasOption('D')) {
-      String[] property = line.getOptionValues('D');
-      for(int i=0; i<property.length-1; i=i+2) {
-        if (property[i]!=null)
-          conf.set(property[i], property[i+1]);
-      }
-    }           
-  }
- 
-  /**
-   * Parse the user-specified options, get the generic options, and modify
-   * configuration accordingly
-   * @param conf Configuration to be modified
-   * @param args User-specified arguments
-   * @return Commoand-specific arguments
-   */
-  static private String[] parseGeneralOptions(Configuration conf, 
-                                              String[] args) {
-    Options opts = buildGeneralOptions();
-    CommandLineParser parser = new GnuParser();
-    try {
-      CommandLine line = parser.parse(opts, args, true);
-      processGeneralOptions(conf, line);
-      return line.getArgs();
-    } catch(ParseException e) {
-      LOG.warn("options parsing failed: "+e.getMessage());
-
-      HelpFormatter formatter = new HelpFormatter();
-      formatter.printHelp("general options are: ", opts);
-    }
-    return args;
-  }
-
+     
   /**
    * Work as a main program: execute a command and handle exception if any
    * @param conf Application default configuration
@@ -182,9 +51,7 @@
    * positive non-zero values can be used to return application-specific codes.
    */
   public final int doMain(Configuration conf, String[] args) throws Exception {
-    String [] commandOptions = parseGeneralOptions(conf, args);
-    setConf(conf);
-    return this.run(commandOptions);
+    return ToolRunner.run(this, args);
   }
 
 }

Added: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java?rev=567824&view=auto
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java (added)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolRunner.java Mon Aug 20 13:41:07 2007
@@ -0,0 +1,68 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.util;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * ToolRunner can be used to run classes implementing the {@link Tool}
+ * interface, via the static {@link #run(Tool, String[])} methods.
+ * {@link GenericOptionsParser} is used to parse the generic Hadoop 
+ * arguments and modify the <code>Configuration</code>.
+ */
+public class ToolRunner {
+ 
+  /**
+   * Runs the given Tool via {@link Tool#run(String[])} with the 
+   * given arguments. Uses the given configuration, or builds one if it
+   * is null, and sets the possibly modified configuration on the tool
+   * via Tool#setConf().
+   * 
+   * @param conf Configuration object to use
+   * @param tool The Tool to run
+   * @param args the arguments to the tool (including generic
+   * arguments; see {@link GenericOptionsParser})
+   * @return exit code of the {@link Tool#run(String[])} method
+   */
+  public static int run(Configuration conf, Tool tool, String[] args) 
+    throws Exception{
+    if(conf == null) {
+      conf = new Configuration();
+    }
+    GenericOptionsParser parser = new GenericOptionsParser(conf, args);
+    //set the configuration back, so that Tool can configure itself
+    tool.setConf(conf);
+    
+    //get the args w/o generic hadoop args
+    String[] toolArgs = parser.getRemainingArgs();
+    return tool.run(toolArgs);
+  }
+  
+  /**
+   * Runs the tool with the tool's own Configuration.
+   * Equivalent to <code>run(tool.getConf(), tool, args)</code>.
+   * @param tool The Tool to run
+   * @param args the arguments to the tool (including generic
+   * arguments; see {@link GenericOptionsParser})
+   * @return exit code of the {@link Tool#run(String[])} method
+   */
+  public static int run(Tool tool, String[] args) 
+    throws Exception{
+    return run(tool.getConf(), tool, args);
+  }
+  
+}

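To illustrate the two entry points (a sketch, not part of the commit; argv is assumed to be the program's arguments): run(Configuration, Tool, String[]) applies the generic options to the supplied configuration, building a fresh one when it is null, while run(Tool, String[]) falls back to the tool's own configuration.

    // Explicit configuration: generic options in argv are applied
    // to conf before FsShell.run() is invoked.
    Configuration conf = new Configuration();
    int rc = ToolRunner.run(conf, new FsShell(), argv);

    // Tool-supplied configuration: equivalent to
    // ToolRunner.run(tool.getConf(), tool, argv).
    int rc2 = ToolRunner.run(new FsShell(new Configuration()), argv);
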
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java Mon Aug 20 13:41:07 2007
@@ -17,14 +17,17 @@
  */
 package org.apache.hadoop.dfs;
 
-import java.io.*;
-import java.net.*;
+import java.io.File;
+import java.io.IOException;
+import java.net.InetSocketAddress;
 import java.util.ArrayList;
 import java.util.Collection;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.FSConstants.StartupOption;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * This class creates a single-process DFS cluster for junit testing.
@@ -215,7 +218,7 @@
       throw new IllegalStateException("Attempting to finalize "
                                       + "Namenode but it is not running");
     }
-    new DFSAdmin().doMain(conf, new String[] {"-finalizeUpgrade"});
+    ToolRunner.run(new DFSAdmin(conf), new String[] {"-finalizeUpgrade"});
   }
   
   /**

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestDFSShellGenericOptions.java Mon Aug 20 13:41:07 2007
@@ -25,11 +25,10 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FsShell;
-import org.apache.hadoop.dfs.DataNode;
-import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FsShell;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.util.ToolRunner;
 
 public class TestDFSShellGenericOptions extends TestCase {
 
@@ -99,7 +98,7 @@
     FsShell shell=new FsShell();
     FileSystem fs=null;
     try {
-      shell.doMain(new Configuration(), args);
+      ToolRunner.run(shell, args);
       fs = new DistributedFileSystem(
                                      DataNode.createSocketAddr(namenode), 
                                      shell.getConf());

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFsck.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFsck.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFsck.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/TestFsck.java Mon Aug 20 13:41:07 2007
@@ -20,9 +20,12 @@
 
 import java.io.ByteArrayOutputStream;
 import java.io.PrintStream;
-import junit.framework.*;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.*;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.util.ToolRunner;
 
 /**
  * A JUnit test for doing fsck
@@ -35,9 +38,11 @@
 
   
   
+  @Override
   protected void setUp() throws Exception {
   }
 
+  @Override
   protected void tearDown() throws Exception {
   }
   
@@ -54,7 +59,7 @@
       ByteArrayOutputStream bStream = new ByteArrayOutputStream();
       PrintStream newOut = new PrintStream(bStream, true);
       System.setOut(newOut);
-      assertEquals(0, new DFSck().doMain(conf, new String[] {"/"}));
+      assertEquals(0, ToolRunner.run(new DFSck(conf), new String[] {"/"}));
       System.setOut(oldOut);
       String outStr = bStream.toString();
       assertTrue(-1 != outStr.indexOf("HEALTHY"));
@@ -67,7 +72,7 @@
       bStream = new ByteArrayOutputStream();
       newOut = new PrintStream(bStream, true);
       System.setOut(newOut);
-      assertEquals(0, new DFSck().doMain(conf, new String[] {"/"}));
+      assertEquals(0, ToolRunner.run(new DFSck(conf), new String[] {"/"}));
       System.setOut(oldOut);
       outStr = bStream.toString();
       // expect the result is corrupt
@@ -95,7 +100,7 @@
       ByteArrayOutputStream bStream = new ByteArrayOutputStream();
       PrintStream newOut = new PrintStream(bStream, true);
       System.setOut(newOut);
-      assertEquals(0, new DFSck().doMain(conf, new String[] {"/non-existent"}));
+      assertEquals(0, ToolRunner.run(new DFSck(conf), new String[] {"/non-existent"}));
       System.setOut(oldOut);
       String outStr = bStream.toString();
       assertEquals(-1, outStr.indexOf("HEALTHY"));

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestCopyFiles.java Mon Aug 20 13:41:07 2007
@@ -19,12 +19,15 @@
 package org.apache.hadoop.fs;
 
 import java.io.IOException;
-import java.util.Random;
 import java.net.URI;
-import junit.framework.*;
+import java.util.Random;
+
+import junit.framework.TestCase;
+
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.util.CopyFiles;
+import org.apache.hadoop.util.ToolRunner;
 
 
 /**
@@ -83,9 +86,11 @@
 
   
   
+  @Override
   protected void setUp() throws Exception {
   }
 
+  @Override
   protected void tearDown() throws Exception {
   }
   
@@ -163,7 +168,7 @@
   /** copy files from local file system to local file system */
   public void testCopyFromLocalToLocal() throws Exception {
     MyFile[] files = createFiles("local", TEST_ROOT_DIR+"/srcdat");
-    new CopyFiles().doMain(new Configuration(),
+    ToolRunner.run(new CopyFiles(new Configuration()),
                            new String[] {"file://"+TEST_ROOT_DIR+"/srcdat",
                                          "file://"+TEST_ROOT_DIR+"/destdat"});
     assertTrue("Source and destination directories do not match.",
@@ -182,7 +187,7 @@
       namenode = conf.get("fs.default.name", "local");
       if (!"local".equals(namenode)) {
         MyFile[] files = createFiles(namenode, "/srcdat");
-        new CopyFiles().doMain(conf, new String[] {"hdfs://"+namenode+"/srcdat",
+        ToolRunner.run(new CopyFiles(conf), new String[] {"hdfs://"+namenode+"/srcdat",
                                                    "hdfs://"+namenode+"/destdat",
                                                    "-log",
                                                    "hdfs://"+namenode+"/logs"});
@@ -210,7 +215,7 @@
       namenode = conf.get("fs.default.name", "local");
       if (!"local".equals(namenode)) {
         MyFile[] files = createFiles("local", TEST_ROOT_DIR+"/srcdat");
-        new CopyFiles().doMain(conf, new String[] {"file://"+TEST_ROOT_DIR+"/srcdat",
+        ToolRunner.run(new CopyFiles(conf), new String[] {"file://"+TEST_ROOT_DIR+"/srcdat",
                                                    "hdfs://"+namenode+"/destdat",
                                                    "-log",
                                                    "hdfs://"+namenode+"/logs"});
@@ -238,7 +243,7 @@
       namenode = conf.get("fs.default.name", "local");
       if (!"local".equals(namenode)) {
         MyFile[] files = createFiles(namenode, "/srcdat");
-        new CopyFiles().doMain(conf, new String[] {"hdfs://"+namenode+"/srcdat",
+        ToolRunner.run(new CopyFiles(conf), new String[] {"hdfs://"+namenode+"/srcdat",
                                                    "file://"+TEST_ROOT_DIR+"/destdat",
                                                    "-log",
                                                    "/logs"});

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java?rev=567824&r1=567823&r2=567824&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/fs/TestFileSystem.java Mon Aug 20 13:41:07 2007
@@ -18,17 +18,31 @@
 
 package org.apache.hadoop.fs;
 
-import java.io.*;
-import java.util.*;
-import junit.framework.TestCase;
+import java.io.DataInputStream;
+import java.io.IOException;
+import java.io.OutputStream;
+import java.util.Arrays;
+import java.util.Random;
 
-import org.apache.commons.logging.*;
+import junit.framework.TestCase;
 
-import org.apache.hadoop.mapred.*;
-import org.apache.hadoop.mapred.lib.*;
-import org.apache.hadoop.io.*;
+import org.apache.commons.logging.Log;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.SequenceFile;
+import org.apache.hadoop.io.UTF8;
+import org.apache.hadoop.io.Writable;
+import org.apache.hadoop.io.WritableComparable;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
-import org.apache.hadoop.conf.*;
+import org.apache.hadoop.mapred.InputFormatBase;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.Mapper;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+import org.apache.hadoop.mapred.SequenceFileInputFormat;
+import org.apache.hadoop.mapred.lib.LongSumReducer;
 
 public class TestFileSystem extends TestCase {
   private static final Log LOG = InputFormatBase.LOG;