Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2007/04/16 23:44:46 UTC

svn commit: r529410 [23/27] - in /lucene/hadoop/trunk: ./ src/contrib/abacus/src/examples/org/apache/hadoop/abacus/examples/ src/contrib/abacus/src/java/org/apache/hadoop/abacus/ src/contrib/data_join/src/java/org/apache/hadoop/contrib/utils/join/ src/...

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/record/compiler/generated/TokenMgrError.java Mon Apr 16 14:44:35 2007
@@ -21,131 +21,131 @@
 
 public class TokenMgrError extends Error
 {
-   /*
-    * Ordinals for various reasons why an Error of this type can be thrown.
-    */
-
-   /**
-    * Lexical error occurred.
-    */
-   static final int LEXICAL_ERROR = 0;
-
-   /**
-    * An attempt was made to create a second instance of a static token manager.
-    */
-   static final int STATIC_LEXER_ERROR = 1;
-
-   /**
-    * Tried to change to an invalid lexical state.
-    */
-   static final int INVALID_LEXICAL_STATE = 2;
-
-   /**
-    * Detected (and bailed out of) an infinite loop in the token manager.
-    */
-   static final int LOOP_DETECTED = 3;
-
-   /**
-    * Indicates the reason why the exception is thrown. It will have
-    * one of the above 4 values.
-    */
-   int errorCode;
-
-   /**
-    * Replaces unprintable characters by their escaped (or unicode escaped)
-    * equivalents in the given string
-    */
-   protected static final String addEscapes(String str) {
-      StringBuffer retval = new StringBuffer();
-      char ch;
-      for (int i = 0; i < str.length(); i++) {
-        switch (str.charAt(i))
+  /*
+   * Ordinals for various reasons why an Error of this type can be thrown.
+   */
+
+  /**
+   * Lexical error occurred.
+   */
+  static final int LEXICAL_ERROR = 0;
+
+  /**
+   * An attempt was made to create a second instance of a static token manager.
+   */
+  static final int STATIC_LEXER_ERROR = 1;
+
+  /**
+   * Tried to change to an invalid lexical state.
+   */
+  static final int INVALID_LEXICAL_STATE = 2;
+
+  /**
+   * Detected (and bailed out of) an infinite loop in the token manager.
+   */
+  static final int LOOP_DETECTED = 3;
+
+  /**
+   * Indicates the reason why the exception is thrown. It will have
+   * one of the above 4 values.
+   */
+  int errorCode;
+
+  /**
+   * Replaces unprintable characters by their escaped (or unicode escaped)
+   * equivalents in the given string
+   */
+  protected static final String addEscapes(String str) {
+    StringBuffer retval = new StringBuffer();
+    char ch;
+    for (int i = 0; i < str.length(); i++) {
+      switch (str.charAt(i))
         {
-           case 0 :
-              continue;
-           case '\b':
-              retval.append("\\b");
-              continue;
-           case '\t':
-              retval.append("\\t");
-              continue;
-           case '\n':
-              retval.append("\\n");
-              continue;
-           case '\f':
-              retval.append("\\f");
-              continue;
-           case '\r':
-              retval.append("\\r");
-              continue;
-           case '\"':
-              retval.append("\\\"");
-              continue;
-           case '\'':
-              retval.append("\\\'");
-              continue;
-           case '\\':
-              retval.append("\\\\");
-              continue;
-           default:
-              if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
-                 String s = "0000" + Integer.toString(ch, 16);
-                 retval.append("\\u" + s.substring(s.length() - 4, s.length()));
-              } else {
-                 retval.append(ch);
-              }
-              continue;
+        case 0 :
+          continue;
+        case '\b':
+          retval.append("\\b");
+          continue;
+        case '\t':
+          retval.append("\\t");
+          continue;
+        case '\n':
+          retval.append("\\n");
+          continue;
+        case '\f':
+          retval.append("\\f");
+          continue;
+        case '\r':
+          retval.append("\\r");
+          continue;
+        case '\"':
+          retval.append("\\\"");
+          continue;
+        case '\'':
+          retval.append("\\\'");
+          continue;
+        case '\\':
+          retval.append("\\\\");
+          continue;
+        default:
+          if ((ch = str.charAt(i)) < 0x20 || ch > 0x7e) {
+            String s = "0000" + Integer.toString(ch, 16);
+            retval.append("\\u" + s.substring(s.length() - 4, s.length()));
+          } else {
+            retval.append(ch);
+          }
+          continue;
         }
-      }
-      return retval.toString();
-   }
-
-   /**
-    * Returns a detailed message for the Error when it is thrown by the
-    * token manager to indicate a lexical error.
-    * Parameters : 
-    *    EOFSeen     : indicates if EOF caused the lexical error
-    *    curLexState : lexical state in which this error occurred
-    *    errorLine   : line number when the error occurred
-    *    errorColumn : column number when the error occurred
-    *    errorAfter  : prefix that was seen before this error occurred
-    *    curChar     : the offending character
-    * Note: You can customize the lexical error message by modifying this method.
-    */
-   protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
-      return("Lexical error at line " +
+    }
+    return retval.toString();
+  }
+
+  /**
+   * Returns a detailed message for the Error when it is thrown by the
+   * token manager to indicate a lexical error.
+   * Parameters : 
+   *    EOFSeen     : indicates if EOF caused the lexical error
+   *    curLexState : lexical state in which this error occurred
+   *    errorLine   : line number when the error occurred
+   *    errorColumn : column number when the error occurred
+   *    errorAfter  : prefix that was seen before this error occurred
+   *    curChar     : the offending character
+   * Note: You can customize the lexical error message by modifying this method.
+   */
+  protected static String LexicalError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar) {
+    return("Lexical error at line " +
            errorLine + ", column " +
            errorColumn + ".  Encountered: " +
            (EOFSeen ? "<EOF> " : ("\"" + addEscapes(String.valueOf(curChar)) + "\"") + " (" + (int)curChar + "), ") +
            "after : \"" + addEscapes(errorAfter) + "\"");
-   }
+  }
 
-   /**
-    * You can also modify the body of this method to customize your error messages.
-    * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
-    * of concern to end-users, so you can return something like : 
-    *
-    *     "Internal Error : Please file a bug report .... "
-    *
-    * from this method for such cases in the release version of your parser.
-    */
-   public String getMessage() {
-      return super.getMessage();
-   }
-
-   /*
-    * Constructors of various flavors follow.
-    */
-
-   public TokenMgrError() {
-   }
-
-   public TokenMgrError(String message, int reason) {
-      super(message);
-      errorCode = reason;
-   }
-
-   public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
-      this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
-   }
+  /**
+   * You can also modify the body of this method to customize your error messages.
+   * For example, cases like LOOP_DETECTED and INVALID_LEXICAL_STATE are not
+   * of concern to end-users, so you can return something like : 
+   *
+   *     "Internal Error : Please file a bug report .... "
+   *
+   * from this method for such cases in the release version of your parser.
+   */
+  public String getMessage() {
+    return super.getMessage();
+  }
+
+  /*
+   * Constructors of various flavors follow.
+   */
+
+  public TokenMgrError() {
+  }
+
+  public TokenMgrError(String message, int reason) {
+    super(message);
+    errorCode = reason;
+  }
+
+  public TokenMgrError(boolean EOFSeen, int lexState, int errorLine, int errorColumn, String errorAfter, char curChar, int reason) {
+    this(LexicalError(EOFSeen, lexState, errorLine, errorColumn, errorAfter, curChar), reason);
+  }
 }

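Aside: the addEscapes() method reindented above maps control characters to their backslash escapes and anything outside 0x20-0x7e to a zero-padded \uXXXX form. A trimmed, self-contained sketch of the same idea (the class name EscapeDemo is hypothetical, not part of this commit):

// Hypothetical demo of the escaping idea in TokenMgrError.addEscapes();
// not part of r529410.
public class EscapeDemo {
  static String escape(String str) {
    StringBuffer retval = new StringBuffer();
    for (int i = 0; i < str.length(); i++) {
      char ch = str.charAt(i);
      switch (ch) {
      case '\t': retval.append("\\t"); break;
      case '\n': retval.append("\\n"); break;
      default:
        if (ch < 0x20 || ch > 0x7e) {
          // Pad to four hex digits: 0x7 becomes "0007".
          String s = "0000" + Integer.toString(ch, 16);
          retval.append("\\u" + s.substring(s.length() - 4));
        } else {
          retval.append(ch);
        }
      }
    }
    return retval.toString();
  }

  public static void main(String[] args) {
    // Prints: tab\there, bell\u0007there  (with literal backslashes)
    System.out.println(escape("tab\there, bell\u0007there"));
  }
}

The "0000" prefix plus substring is the padding trick: it guarantees exactly four hex digits without a formatting call.
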
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/tools/Logalyzer.java Mon Apr 16 14:44:35 2007
@@ -73,8 +73,8 @@
     }
     
     public void map(WritableComparable key, Writable value,
-        OutputCollector output, Reporter reporter)
-    throws IOException {
+                    OutputCollector output, Reporter reporter)
+      throws IOException {
       String text = ((Text)value).toString();
       Matcher matcher = pattern.matcher(text);
       while (matcher.find()) {
@@ -114,7 +114,7 @@
     }
     
     public int compare(byte[] b1, int s1, int l1,
-        byte[] b2, int s2, int l2) {
+                       byte[] b2, int s2, int l2) {
       
       if(sortSpec == null) {
         return super.compare(b1, s1, l1, b2, s2, l2);
@@ -143,9 +143,9 @@
           
           //Compare columns
           int comparision = super.compareBytes(
-              c1.getBytes(), 0, c1.length(),
-              c2.getBytes(), 0, c2.length()
-          );
+                                               c1.getBytes(), 0, c1.length(),
+                                               c2.getBytes(), 0, c2.length()
+                                               );
           
           //They differ!
           if(comparision != 0) {
@@ -174,11 +174,11 @@
    * @throws IOException
    */
   public void	
-  doArchive(String logListURI, String archiveDirectory)
-  throws IOException
+    doArchive(String logListURI, String archiveDirectory)
+    throws IOException
   {
     String destURL = new String("hdfs://" + fsConfig.get("fs.default.name", "local") + 
-        archiveDirectory);
+                                archiveDirectory);
     CopyFiles.copy(fsConfig, logListURI, destURL, true, false);
   }
   
@@ -192,16 +192,16 @@
    * @throws IOException
    */
   public void
-  doAnalyze(String inputFilesDirectory, String outputDirectory,
-      String grepPattern, String sortColumns, String columnSeparator)
-  throws IOException
+    doAnalyze(String inputFilesDirectory, String outputDirectory,
+              String grepPattern, String sortColumns, String columnSeparator)
+    throws IOException
   {		
     Path grepInput = new Path(inputFilesDirectory);
     
     Path analysisOutput = null;
     if(outputDirectory.equals("")) {
       analysisOutput =  new Path(inputFilesDirectory, "logalyzer_" + 
-          Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
+                                 Integer.toString(new Random().nextInt(Integer.MAX_VALUE)));
     } else {
       analysisOutput = new Path(outputDirectory);
     }
@@ -237,9 +237,9 @@
     
     String version = "Logalyzer.0.0.1";
     String usage = "Usage: Logalyzer [-archive -logs <urlsFile>] " +
-    "-archiveDir <archiveDirectory> " +
-    "-grep <pattern> -sort <column1,column2,...> -separator <separator> " +
-    "-analysis <outputDirectory>";
+      "-archiveDir <archiveDirectory> " +
+      "-grep <pattern> -sort <column1,column2,...> -separator <separator> " +
+      "-analysis <outputDirectory>";
     
     System.out.println(version);
     if (args.length == 0) {

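Aside: the comparator hunks above split two raw records into columns and compare them column by column, stopping at the first difference, per the -sort spec. The idea in isolation (ColumnCompareDemo and the whitespace separator are hypothetical; the real code compares raw bytes, not Strings):

// Hypothetical illustration of column-wise comparison as in Logalyzer's
// sort comparator; not part of r529410.
public class ColumnCompareDemo {
  // Compare the given column of two separator-delimited records.
  static int compareColumn(String r1, String r2, String sep, int col) {
    String c1 = r1.split(sep)[col];
    String c2 = r2.split(sep)[col];
    return c1.compareTo(c2);
  }

  public static void main(String[] args) {
    String a = "2007-04-16 ERROR dfs";
    String b = "2007-04-16 WARN  mapred";
    // Column 0 ties, so a sort spec of "0,1" falls through to column 1.
    int c = compareColumn(a, b, " +", 0);
    if (c == 0) {
      c = compareColumn(a, b, " +", 1);
    }
    System.out.println(c < 0 ? "a first" : "b first");  // ERROR < WARN -> "a first"
  }
}
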
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/CopyFiles.java Mon Apr 16 14:44:35 2007
@@ -99,8 +99,8 @@
      * @throws IOException
      */
     public abstract void setup(Configuration conf, JobConf jobConf, 
-        String[] srcPaths, String destPath, boolean ignoreReadFailures) 
-    throws IOException;
+                               String[] srcPaths, String destPath, boolean ignoreReadFailures) 
+      throws IOException;
     
     /**
     * Interface to clean up *distcp*-specific resources
@@ -111,7 +111,7 @@
      * @throws IOException
      */
     public abstract void cleanup(Configuration conf, JobConf jobConf, 
-        String srcPath, String destPath) throws IOException;
+                                 String srcPath, String destPath) throws IOException;
     
     /**
      * Make a path relative with respect to a root path.
@@ -156,25 +156,25 @@
      * @throws IOException
      */
     public int getMapCount(final int initialEstimate, final long totalBytes,
-        final JobClient client)
-    throws IOException {
-        int numMaps = initialEstimate;
-        if (numMaps > MAX_NUM_MAPS) {
-            numMaps = MAX_NUM_MAPS;
-        }
-        if (totalBytes != -1 &&
-            numMaps > (int)(totalBytes / MIN_BYTES_PER_MAP)) {
-          numMaps = (int) (totalBytes / MIN_BYTES_PER_MAP);
-        }
-        ClusterStatus cluster = client.getClusterStatus();
-        int tmpMaps = cluster.getTaskTrackers() * MAX_MAPS_PER_NODE;
-        if (numMaps > tmpMaps) {
-            numMaps = tmpMaps;
-        }
-        if (numMaps == 0) {
-            numMaps = 1;
-        }
-        return numMaps;
+                           final JobClient client)
+      throws IOException {
+      int numMaps = initialEstimate;
+      if (numMaps > MAX_NUM_MAPS) {
+        numMaps = MAX_NUM_MAPS;
+      }
+      if (totalBytes != -1 &&
+          numMaps > (int)(totalBytes / MIN_BYTES_PER_MAP)) {
+        numMaps = (int) (totalBytes / MIN_BYTES_PER_MAP);
+      }
+      ClusterStatus cluster = client.getClusterStatus();
+      int tmpMaps = cluster.getTaskTrackers() * MAX_MAPS_PER_NODE;
+      if (numMaps > tmpMaps) {
+        numMaps = tmpMaps;
+      }
+      if (numMaps == 0) {
+        numMaps = 1;
+      }
+      return numMaps;
     } 
   }
   
@@ -183,7 +183,7 @@
    * @author Milind Bhandarkar
    */
   public static class FSCopyFilesMapper extends CopyFilesMapper 
-  implements Mapper 
+    implements Mapper 
   {
     private int sizeBuf = 4096;
     private FileSystem srcFileSys = null;
@@ -224,11 +224,11 @@
           totalBytesCopied += bytesSinceLastReport;
           bytesSinceLastReport = 0L;
           reporter.setStatus("Copy "+ src + ": " + 
-              percentFormat.format(100.0 * totalBytesCopied / 
-                  totalBytes) +
-                  "% and " +
-                  StringUtils.humanReadableInt(totalBytesCopied) +
-          " bytes");
+                             percentFormat.format(100.0 * totalBytesCopied / 
+                                                  totalBytes) +
+                             "% and " +
+                             StringUtils.humanReadableInt(totalBytesCopied) +
+                             " bytes");
         }
       }
       
@@ -238,7 +238,7 @@
       totalBytesCopied += bytesSinceLastReport;
       bytesSinceLastReport = 0L;
       reporter.setStatus("Finished. Bytes copied: " + 
-          StringUtils.humanReadableInt(totalBytesCopied));
+                         StringUtils.humanReadableInt(totalBytesCopied));
     }
     
     /**
@@ -250,9 +250,9 @@
      * @param ignoreReadFailures : Ignore read failures?
      */
     public void setup(Configuration conf, JobConf jobConf, 
-        String[] srcPaths, String destPath, 
-        boolean ignoreReadFailures) 
-    throws IOException
+                      String[] srcPaths, String destPath, 
+                      boolean ignoreReadFailures) 
+      throws IOException
     {
       URI srcURI = toURI(srcPaths[0]);
       URI destURI = toURI(destPath);
@@ -298,7 +298,7 @@
       
       Random r = new Random();
       Path jobDirectory = new Path(jobConf.getSystemDir(), "distcp_" 
-          + Integer.toString(Math.abs(r.nextInt()), 36));
+                                   + Integer.toString(Math.abs(r.nextInt()), 36));
       Path inDir = new Path(jobDirectory, "in");
       Path fakeOutDir = new Path(jobDirectory, "out");
       FileSystem fileSys = FileSystem.get(jobConf);
@@ -359,8 +359,8 @@
     }
     
     public void cleanup(Configuration conf, JobConf jobConf, 
-        String srcPath, String destPath) 
-    throws IOException
+                        String srcPath, String destPath) 
+      throws IOException
     {
       //Clean up jobDirectory
       Path jobDirectory = new Path(jobConf.get("distcp.job.dir", "/"));
@@ -386,7 +386,7 @@
         srcFileSys = FileSystem.get(new URI(srcfs), job);
         destFileSys = FileSystem.get(new URI(destfs), job);
       } catch (URISyntaxException e) {
-          throw new RuntimeException("Failed parse of src or dest URI.", e);
+        throw new RuntimeException("Failed parse of src or dest URI.", e);
       } catch (IOException ex) {
         throw new RuntimeException("Unable to get the named file system.", ex);
       }
@@ -402,16 +402,16 @@
      * @param reporter
      */
     public void map(WritableComparable key,
-        Writable value,
-        OutputCollector out,
-        Reporter reporter) throws IOException {
+                    Writable value,
+                    OutputCollector out,
+                    Reporter reporter) throws IOException {
       String src = ((Text) key).toString();
       try {
         copy(src, reporter);
       } catch (IOException except) {
         if (ignoreReadFailures) {
           reporter.setStatus("Failed to copy " + src + " : " + 
-              StringUtils.stringifyException(except));
+                             StringUtils.stringifyException(except));
           try {
             destFileSys.delete(new Path(destPath, src));
           } catch (Throwable ex) {
@@ -430,7 +430,7 @@
   }
   
   public static class HTTPCopyFilesMapper extends CopyFilesMapper 
-  implements Mapper 
+    implements Mapper 
   {
     private URI srcURI = null;
     private FileSystem destFileSys = null;
@@ -447,9 +447,9 @@
      * @param ignoreReadFailures : Ignore read failures?
      */
     public void setup(Configuration conf, JobConf jobConf, 
-        String[] srcPaths, String destPath, 
-        boolean ignoreReadFailures) 
-    throws IOException
+                      String[] srcPaths, String destPath, 
+                      boolean ignoreReadFailures) 
+      throws IOException
     {
       //Destination
       URI destURI = toURI(destPath);
@@ -476,7 +476,7 @@
       FileSystem fileSystem = FileSystem.get(conf);
       Random r = new Random();
       Path jobDirectory = new Path(jobConf.getSystemDir(), "distcp_" + 
-          Integer.toString(Math.abs(r.nextInt()), 36));
+                                   Integer.toString(Math.abs(r.nextInt()), 36));
       Path jobInputDir = new Path(jobDirectory, "in");
       if (!fileSystem.mkdirs(jobInputDir)) {
         throw new IOException("Mkdirs failed to create " + jobInputDir.toString());
@@ -498,8 +498,8 @@
     }	
     
     public void cleanup(Configuration conf, JobConf jobConf, 
-        String srcPath, String destPath) 
-    throws IOException
+                        String srcPath, String destPath) 
+      throws IOException
     {
       //Clean up jobDirectory
       Path jobDirectory = new Path(jobConf.get("distcp.job.dir", "/"));
@@ -531,10 +531,10 @@
     }
     
     public void map(WritableComparable key,
-        Writable val,
-        OutputCollector out,
-        Reporter reporter) throws IOException 
-        {
+                    Writable val,
+                    OutputCollector out,
+                    Reporter reporter) throws IOException 
+    {
       //The url of the file
       try {
         srcURI = new URI(((Text)key).toString());
@@ -555,8 +555,8 @@
           new BufferedInputStream(connection.getInputStream());
         
         FSDataOutputStream os = destFileSys.create(destinationPath, true, 
-              bufferSize, (short)jobConf.getInt("dfs.replication", 3), 
-              jobConf.getLong("dfs.block.size", 67108864));
+                                                   bufferSize, (short)jobConf.getInt("dfs.replication", 3), 
+                                                   jobConf.getLong("dfs.block.size", 67108864));
         
         int readBytes = 0;
         while((readBytes = is.read(buffer, 0, bufferSize)) != -1) {
@@ -568,7 +568,7 @@
         connection.disconnect();
         
         reporter.setStatus("Copied: " + srcURI.toString() + 
-            " to: " + destinationPath.toString());
+                           " to: " + destinationPath.toString());
         
       } catch(Exception e) {
         reporter.setStatus("Failed to copy from: " + (Text)key);
@@ -588,17 +588,17 @@
   private static class CopyMapperFactory
   {
     public static CopyFilesMapper getMapper(Configuration conf, String protocol)
-    throws IOException
+      throws IOException
     {
       CopyFilesMapper mapper = null;
       if (protocol == null) {
-          // Use 'default' filesystem.
-          protocol = FileSystem.get(conf).getUri().getScheme();
+        // Use 'default' filesystem.
+        protocol = FileSystem.get(conf).getUri().getScheme();
       }
       protocol = protocol.toLowerCase();
       
       if(HDFS.equalsIgnoreCase(protocol) || "file".equalsIgnoreCase(protocol) ||
-          S3.equalsIgnoreCase(protocol)) {
+         S3.equalsIgnoreCase(protocol)) {
         mapper = new FSCopyFilesMapper();
       } else if("http".equalsIgnoreCase(protocol)) {
         mapper = new HTTPCopyFilesMapper();
@@ -619,11 +619,11 @@
     if("file".equalsIgnoreCase(srcListURIScheme)) {
       fis = new BufferedReader(new FileReader(srcListURIPath));
     } else if (srcListURIScheme != null &&
-          HDFS.equalsIgnoreCase(srcListURIScheme)) {
+               HDFS.equalsIgnoreCase(srcListURIScheme)) {
       FileSystem fs = FileSystem.get(srcListURI, conf);
       fis = new BufferedReader(
-          new InputStreamReader(fs.open(new Path(srcListURIPath)))
-          );
+                               new InputStreamReader(fs.open(new Path(srcListURIPath)))
+                               );
     } else if("http".equalsIgnoreCase(srcListURIScheme)) {
       //Copy the file 
       URL url = srcListURI.toURL();
@@ -632,8 +632,8 @@
       connection.connect();
       
       fis = new BufferedReader(
-          new InputStreamReader(connection.getInputStream())
-          );
+                               new InputStreamReader(connection.getInputStream())
+                               );
     } else {
       throw new IOException("Unsupported source list uri: " + srcListURIScheme);
     }
@@ -664,7 +664,7 @@
    * @return
    */
   private static String[] parseInputFile(String protocol, String[] uris)
-  throws IOException
+    throws IOException
   {
     ArrayList<String> protocolURIs = new ArrayList<String>(uris.length);
     
@@ -679,13 +679,13 @@
   }
   
   public static URI toURI(final String u) throws IOException {
-      URI result = null;
-      try {
-          result = new URI(u);
-      } catch (URISyntaxException ex) {
-        throw new IOException("Path does not parse as URI: " + u);
-      }
-      return result;
+    URI result = null;
+    try {
+      result = new URI(u);
+    } catch (URISyntaxException ex) {
+      throw new IOException("Path does not parse as URI: " + u);
+    }
+    return result;
   }
   
   /**
@@ -697,8 +697,8 @@
    * @param ignoreReadFailures True if we are to ignore read failures.
    */
   public static void copy(Configuration conf, String srcPath, String destPath,
-      boolean srcAsList, boolean ignoreReadFailures) 
-  throws IOException
+                          boolean srcAsList, boolean ignoreReadFailures) 
+    throws IOException
   {
     //Job configuration
     JobConf jobConf = new JobConf(conf, CopyFiles.class);
@@ -816,8 +816,8 @@
   
   public static void main(String[] args) throws Exception {
     int res = new CopyFiles().doMain(
-        new JobConf(new Configuration(), CopyFiles.class), 
-        args);
+                                     new JobConf(new Configuration(), CopyFiles.class), 
+                                     args);
     System.exit(res);
   }
 }

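Aside: getMapCount() above clamps an initial estimate three ways: a hard cap, a bytes-per-map floor, and a per-cluster cap derived from the tracker count, with a minimum of one map. The arithmetic standalone (the constant values here are assumptions; the commit does not show them):

// Hypothetical standalone version of the clamping in CopyFiles.getMapCount();
// the constant values are illustrative, not the ones in the commit.
public class MapCountDemo {
  static final int MAX_NUM_MAPS = 10000;                // assumed
  static final long MIN_BYTES_PER_MAP = 1L << 28;       // 256 MB, assumed
  static final int MAX_MAPS_PER_NODE = 10;              // assumed

  static int getMapCount(int initialEstimate, long totalBytes, int taskTrackers) {
    int numMaps = Math.min(initialEstimate, MAX_NUM_MAPS);
    if (totalBytes != -1) {
      // Don't schedule more maps than the data can keep busy.
      numMaps = Math.min(numMaps, (int) (totalBytes / MIN_BYTES_PER_MAP));
    }
    numMaps = Math.min(numMaps, taskTrackers * MAX_MAPS_PER_NODE);
    return Math.max(numMaps, 1);  // always run at least one map
  }

  public static void main(String[] args) {
    // 1 TB across 20 nodes: the bytes floor would allow 4096 maps,
    // but 20 nodes * 10 maps/node caps it at 200.
    System.out.println(getMapCount(100000, 1L << 40, 20));  // 200
  }
}
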
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/NativeCodeLoader.java Mon Apr 16 14:44:35 2007
@@ -49,7 +49,7 @@
     
     if (!nativeCodeLoaded) {
       LOG.warn("Unable to load native-hadoop library for your platform... " +
-      "using builtin-java classes where applicable");
+               "using builtin-java classes where applicable");
     }
   }
 

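Aside: the warning above fires when the JNI library cannot be loaded. The commit only shows the message, but the usual shape of such a loader is a static initializer that tries System.loadLibrary and records the outcome; a sketch of that pattern, with the library name and flag assumed:

// Sketch of the try-load-then-fall-back pattern behind NativeCodeLoader;
// the library name "hadoop" and the flag are assumptions, not shown in
// the commit.
public class NativeLoaderDemo {
  private static boolean nativeCodeLoaded = false;

  static {
    try {
      System.loadLibrary("hadoop");
      nativeCodeLoaded = true;
    } catch (Throwable t) {
      // Fall through: callers check isNativeCodeLoaded() and use the
      // builtin-java classes instead, as the warning above says.
    }
  }

  public static boolean isNativeCodeLoaded() {
    return nativeCodeLoaded;
  }

  public static void main(String[] args) {
    System.out.println("native loaded: " + isNativeCodeLoaded());
  }
}
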
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PlatformName.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PlatformName.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PlatformName.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PlatformName.java Mon Apr 16 14:44:35 2007
@@ -29,8 +29,8 @@
    * per the java-vm.
    */
   private static final String platformName = System.getProperty("os.name") + "-" + 
-                                      System.getProperty("os.arch") + "-" +
-                                      System.getProperty("sun.arch.data.model");
+    System.getProperty("os.arch") + "-" +
+    System.getProperty("sun.arch.data.model");
   
   /**
    * Get the complete platform as per the java-vm.

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PriorityQueue.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PriorityQueue.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PriorityQueue.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/PriorityQueue.java Mon Apr 16 14:44:35 2007
@@ -27,7 +27,7 @@
   private int maxSize;
 
   /** Determines the ordering of objects in this priority queue.  Subclasses
-    must define this one method. */
+      must define this one method. */
   protected abstract boolean lessThan(Object a, Object b);
 
   /** Subclass constructors must call this. */
@@ -67,7 +67,7 @@
     }
     else
       return false;
-   }
+  }
 
   /** Returns the least element of the PriorityQueue in constant time. */
   public final Object top() {
@@ -78,7 +78,7 @@
   }
 
   /** Removes and returns the least element of the PriorityQueue in log(size)
-    time. */
+      time. */
   public final Object pop() {
     if (size > 0) {
       Object result = heap[1];			  // save first value

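Aside: this PriorityQueue is abstract, and the whole subclass contract is the single lessThan() method noted in the comment above. A minimal illustration of that contract (MiniQueue and the subclass are hypothetical stand-ins; the real class adds the heap mechanics around it):

// Self-contained illustration of the lessThan() contract; MiniQueue is a
// hypothetical stand-in for org.apache.hadoop.util.PriorityQueue, minus
// the heap mechanics.
public class LessThanDemo {
  static abstract class MiniQueue {
    // Determines the ordering; subclasses must define this one method.
    protected abstract boolean lessThan(Object a, Object b);
  }

  public static void main(String[] args) {
    MiniQueue byLength = new MiniQueue() {
      protected boolean lessThan(Object a, Object b) {
        return ((String) a).length() < ((String) b).length();
      }
    };
    System.out.println(byLength.lessThan("dfs", "mapred"));  // true
  }
}
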
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ProgramDriver.java Mon Apr 16 14:44:35 2007
@@ -28,116 +28,116 @@
 
 public class ProgramDriver {
     
-    /**
-     * A description of a program based on its class and a 
-     * human-readable description.
-     * @author Owen O'Malley
-     * @date april 2006
-     */
-     Map<String, ProgramDescription> programs;
+  /**
+   * A description of a program based on its class and a 
+   * human-readable description.
+   * @author Owen O'Malley
+   * @date april 2006
+   */
+  Map<String, ProgramDescription> programs;
      
-     public ProgramDriver(){
-        programs = new TreeMap<String, ProgramDescription>();
-     }
+  public ProgramDriver(){
+    programs = new TreeMap<String, ProgramDescription>();
+  }
      
-    static private class ProgramDescription {
-	
-	static final Class[] paramTypes = new Class[] {String[].class};
+  static private class ProgramDescription {
 	
-	/**
-	 * Create a description of an example program.
-	 * @param mainClass the class with the main for the example program
-	 * @param description a string to display to the user in help messages
-	 * @throws SecurityException if we can't use reflection
-	 * @throws NoSuchMethodException if the class doesn't have a main method
-	 */
-	public ProgramDescription(Class mainClass, 
-				  String description)
-	    throws SecurityException, NoSuchMethodException {
-	    this.main = mainClass.getMethod("main", paramTypes);
-	    this.description = description;
-	}
-	
-	/**
-	 * Invoke the example application with the given arguments
-	 * @param args the arguments for the application
-	 * @throws Throwable The exception thrown by the invoked method
-	 */
-	public void invoke(String[] args)
-	    throws Throwable {
-	    try {
-		main.invoke(null, new Object[]{args});
-	    } catch (InvocationTargetException except) {
-		throw except.getCause();
-	    }
-	}
-	
-	public String getDescription() {
-	    return description;
-	}
+    static final Class[] paramTypes = new Class[] {String[].class};
 	
-	private Method main;
-	private String description;
-    }
-    
-    private static void printUsage(Map<String, ProgramDescription> programs) {
-	System.out.println("Valid program names are:");
-        for(Map.Entry<String, ProgramDescription> item : programs.entrySet()) {
-            System.out.println("  " + item.getKey() + ": " +
-                       item.getValue().getDescription());         
-        } 
-    }
-    
     /**
-     * This is the method that adds the class to the repository
-     * @param name The name under which the class is registered
-     * @param mainClass The class that you want to add to the repository
-     * @param description The description of the class
-     * @throws NoSuchMethodException 
-     * @throws SecurityException 
+     * Create a description of an example program.
+     * @param mainClass the class with the main for the example program
+     * @param description a string to display to the user in help messages
+     * @throws SecurityException if we can't use reflection
+     * @throws NoSuchMethodException if the class doesn't have a main method
      */
-    public void addClass (String name, Class mainClass, String description) throws Throwable {
-	programs.put(name , new ProgramDescription(mainClass, description));
+    public ProgramDescription(Class mainClass, 
+                              String description)
+      throws SecurityException, NoSuchMethodException {
+      this.main = mainClass.getMethod("main", paramTypes);
+      this.description = description;
     }
-    
+	
     /**
-     * This is a driver for the example programs.
-     * It looks at the first command line argument and tries to find an
-     * example program with that name.
-     * If it is found, it calls the main method in that class with the rest 
-     * of the command line arguments.
-     * @param args The arguments from the user. args[0] is the command to run.
-     * @throws NoSuchMethodException 
-     * @throws SecurityException 
-     * @throws IllegalAccessException 
-     * @throws IllegalArgumentException 
-     * @throws Throwable Anything thrown by the example program's main
+     * Invoke the example application with the given arguments
+     * @param args the arguments for the application
+     * @throws Throwable The exception thrown by the invoked method
      */
-    public void driver(String[] args) 
-	throws Throwable 
-    {
-	// Make sure they gave us a program name.
-	if (args.length == 0) {
-	    System.out.println("An example program must be given as the" + 
-			       " first argument.");
-	    printUsage(programs);
-	    return;
-	}
-	
-	// And that it is good.
-	ProgramDescription pgm = programs.get(args[0]);
-	if (pgm == null) {
-	    System.out.println("Unknown program '" + args[0] + "' chosen.");
-	    printUsage(programs);
-	    return;
-	}
-	
-	// Remove the leading argument and call main
-	String[] new_args = new String[args.length - 1];
-	for(int i=1; i < args.length; ++i) {
-	    new_args[i-1] = args[i];
-	}
-	pgm.invoke(new_args);
+    public void invoke(String[] args)
+      throws Throwable {
+      try {
+        main.invoke(null, new Object[]{args});
+      } catch (InvocationTargetException except) {
+        throw except.getCause();
+      }
+    }
+	
+    public String getDescription() {
+      return description;
+    }
+	
+    private Method main;
+    private String description;
+  }
+    
+  private static void printUsage(Map<String, ProgramDescription> programs) {
+    System.out.println("Valid program names are:");
+    for(Map.Entry<String, ProgramDescription> item : programs.entrySet()) {
+      System.out.println("  " + item.getKey() + ": " +
+                         item.getValue().getDescription());         
+    } 
+  }
+    
+  /**
+   * This is the method that adds the class to the repository
+   * @param name The name under which the class is registered
+   * @param mainClass The class that you want to add to the repository
+   * @param description The description of the class
+   * @throws NoSuchMethodException 
+   * @throws SecurityException 
+   */
+  public void addClass (String name, Class mainClass, String description) throws Throwable {
+    programs.put(name , new ProgramDescription(mainClass, description));
+  }
+    
+  /**
+   * This is a driver for the example programs.
+   * It looks at the first command line argument and tries to find an
+   * example program with that name.
+   * If it is found, it calls the main method in that class with the rest 
+   * of the command line arguments.
+   * @param args The arguments from the user. args[0] is the command to run.
+   * @throws NoSuchMethodException 
+   * @throws SecurityException 
+   * @throws IllegalAccessException 
+   * @throws IllegalArgumentException 
+   * @throws Throwable Anything thrown by the example program's main
+   */
+  public void driver(String[] args) 
+    throws Throwable 
+  {
+    // Make sure they gave us a program name.
+    if (args.length == 0) {
+      System.out.println("An example program must be given as the" + 
+                         " first argument.");
+      printUsage(programs);
+      return;
+    }
+	
+    // And that it is good.
+    ProgramDescription pgm = programs.get(args[0]);
+    if (pgm == null) {
+      System.out.println("Unknown program '" + args[0] + "' chosen.");
+      printUsage(programs);
+      return;
+    }
+	
+    // Remove the leading argument and call main
+    String[] new_args = new String[args.length - 1];
+    for(int i=1; i < args.length; ++i) {
+      new_args[i-1] = args[i];
     }
+    pgm.invoke(new_args);
+  }
     
 }

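Aside: ProgramDriver keeps a name-to-main-method map and dispatches on args[0] via reflection. Typical wiring, using only the addClass() and driver() signatures visible in the diff (the Hello class is a stand-in):

// Hypothetical driver wiring; only the addClass()/driver() signatures are
// taken from the diff, the Hello class is a stand-in.
public class DriverDemo {
  public static class Hello {
    public static void main(String[] args) {
      System.out.println("hello " + (args.length > 0 ? args[0] : "world"));
    }
  }

  public static void main(String[] args) throws Throwable {
    org.apache.hadoop.util.ProgramDriver pgd =
      new org.apache.hadoop.util.ProgramDriver();
    pgd.addClass("hello", Hello.class, "prints a greeting");
    // driver() consumes args[0], finds Hello by name, and invokes its
    // main() with the remaining arguments via reflection.
    pgd.driver(new String[] { "hello", "hadoop" });  // prints "hello hadoop"
  }
}
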
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ReflectionUtils.java Mon Apr 16 14:44:35 2007
@@ -34,144 +34,144 @@
 
 public class ReflectionUtils {
     
-    private static final Class[] emptyArray = new Class[]{};
-    /** 
-     * Cache of constructors for each class. Pins the classes so they
-     * can't be garbage collected until ReflectionUtils can be collected.
-     */
-    private static final Map<Class<?>, Constructor<?>> CONSTRUCTOR_CACHE = 
-        new ConcurrentHashMap<Class<?>, Constructor<?>>();
+  private static final Class[] emptyArray = new Class[]{};
+  /** 
+   * Cache of constructors for each class. Pins the classes so they
+   * can't be garbage collected until ReflectionUtils can be collected.
+   */
+  private static final Map<Class<?>, Constructor<?>> CONSTRUCTOR_CACHE = 
+    new ConcurrentHashMap<Class<?>, Constructor<?>>();
 
-    /**
-     * Check and set 'configuration' if necessary.
-     * 
-     * @param theObject object for which to set configuration
-     * @param conf Configuration
-     */
-    public static void setConf(Object theObject, Configuration conf) {
-      if (conf != null) {
-        if (theObject instanceof Configurable) {
-            ((Configurable) theObject).setConf(conf);
-        }
-        if (conf instanceof JobConf && 
-                theObject instanceof JobConfigurable) {
-            ((JobConfigurable)theObject).configure((JobConf) conf);
-        }
+  /**
+   * Check and set 'configuration' if necessary.
+   * 
+   * @param theObject object for which to set configuration
+   * @param conf Configuration
+   */
+  public static void setConf(Object theObject, Configuration conf) {
+    if (conf != null) {
+      if (theObject instanceof Configurable) {
+        ((Configurable) theObject).setConf(conf);
+      }
+      if (conf instanceof JobConf && 
+          theObject instanceof JobConfigurable) {
+        ((JobConfigurable)theObject).configure((JobConf) conf);
       }
     }
+  }
 
-    /** Create an object for the given class and initialize it from conf
-     * 
-     * @param theClass class of which an object is created
-     * @param conf Configuration
-     * @return a new object
-     */
-    public static Object newInstance(Class theClass, Configuration conf) {
-        Object result;
-        try {
-            Constructor meth = CONSTRUCTOR_CACHE.get(theClass);
-            if (meth == null) {
-              meth = theClass.getDeclaredConstructor(emptyArray);
-              meth.setAccessible(true);
-              CONSTRUCTOR_CACHE.put(theClass, meth);
-            }
-            result = meth.newInstance();
-        } catch (Exception e) {
-            throw new RuntimeException(e);
-        }
-        setConf(result, conf);
-        return result;
-    }
+  /** Create an object for the given class and initialize it from conf
+   * 
+   * @param theClass class of which an object is created
+   * @param conf Configuration
+   * @return a new object
+   */
+  public static Object newInstance(Class theClass, Configuration conf) {
+    Object result;
+    try {
+      Constructor meth = CONSTRUCTOR_CACHE.get(theClass);
+      if (meth == null) {
+        meth = theClass.getDeclaredConstructor(emptyArray);
+        meth.setAccessible(true);
+        CONSTRUCTOR_CACHE.put(theClass, meth);
+      }
+      result = meth.newInstance();
+    } catch (Exception e) {
+      throw new RuntimeException(e);
+    }
+    setConf(result, conf);
+    return result;
+  }
 
-    static private ThreadMXBean threadBean = 
-      ManagementFactory.getThreadMXBean();
+  static private ThreadMXBean threadBean = 
+    ManagementFactory.getThreadMXBean();
     
-    public static void setContentionTracing(boolean val) {
-      threadBean.setThreadContentionMonitoringEnabled(val);
-    }
-    
-    private static String getTaskName(long id, String name) {
-      if (name == null) {
-        return Long.toString(id);
+  public static void setContentionTracing(boolean val) {
+    threadBean.setThreadContentionMonitoringEnabled(val);
+  }
+    
+  private static String getTaskName(long id, String name) {
+    if (name == null) {
+      return Long.toString(id);
+    }
+    return id + " (" + name + ")";
+  }
+    
+  /**
+   * Print all of the thread's information and stack traces.
+   * 
+   * @param stream the stream to write to
+   * @param title a string title for the stack trace
+   */
+  public static void printThreadInfo(PrintWriter stream,
+                                     String title) {
+    final int STACK_DEPTH = 20;
+    boolean contention = threadBean.isThreadContentionMonitoringEnabled();
+    long[] threadIds = threadBean.getAllThreadIds();
+    stream.println("Process Thread Dump: " + title);
+    stream.println(threadIds.length + " active threads");
+    for (long tid: threadIds) {
+      ThreadInfo info = threadBean.getThreadInfo(tid, STACK_DEPTH);
+      if (info == null) {
+        stream.println("  Inactive");
+        continue;
       }
-      return id + " (" + name + ")";
-    }
-    
-    /**
-     * Print all of the thread's information and stack traces.
-     * 
-     * @param stream the stream to write to
-     * @param title a string title for the stack trace
-     */
-    public static void printThreadInfo(PrintWriter stream,
-                                        String title) {
-      final int STACK_DEPTH = 20;
-      boolean contention = threadBean.isThreadContentionMonitoringEnabled();
-      long[] threadIds = threadBean.getAllThreadIds();
-      stream.println("Process Thread Dump: " + title);
-      stream.println(threadIds.length + " active threads");
-      for (long tid: threadIds) {
-        ThreadInfo info = threadBean.getThreadInfo(tid, STACK_DEPTH);
-        if (info == null) {
-          stream.println("  Inactive");
-          continue;
-        }
-        stream.println("Thread " + 
-                       getTaskName(info.getThreadId(),
-                                   info.getThreadName()) + ":");
-        Thread.State state = info.getThreadState();
-        stream.println("  State: " + state);
-        stream.println("  Blocked count: " + info.getBlockedCount());
-        stream.println("  Waited count: " + info.getWaitedCount());
-        if (contention) {
-          stream.println("  Blocked time: " + info.getBlockedTime());
-          stream.println("  Waited time: " + info.getWaitedTime());
-        }
-        if (state == Thread.State.WAITING) {
-          stream.println("  Waiting on " + info.getLockName());
-        } else  if (state == Thread.State.BLOCKED) {
-          stream.println("  Blocked on " + info.getLockName());
-          stream.println("  Blocked by " + 
-                         getTaskName(info.getLockOwnerId(),
-                                     info.getLockOwnerName()));
-        }
-        stream.println("  Stack:");
-        for (StackTraceElement frame: info.getStackTrace()) {
-          stream.println("    " + frame.toString());
-        }
+      stream.println("Thread " + 
+                     getTaskName(info.getThreadId(),
+                                 info.getThreadName()) + ":");
+      Thread.State state = info.getThreadState();
+      stream.println("  State: " + state);
+      stream.println("  Blocked count: " + info.getBlockedCount());
+      stream.println("  Waited count: " + info.getWaitedCount());
+      if (contention) {
+        stream.println("  Blocked time: " + info.getBlockedTime());
+        stream.println("  Waited time: " + info.getWaitedTime());
+      }
+      if (state == Thread.State.WAITING) {
+        stream.println("  Waiting on " + info.getLockName());
+      } else  if (state == Thread.State.BLOCKED) {
+        stream.println("  Blocked on " + info.getLockName());
+        stream.println("  Blocked by " + 
+                       getTaskName(info.getLockOwnerId(),
+                                   info.getLockOwnerName()));
+      }
+      stream.println("  Stack:");
+      for (StackTraceElement frame: info.getStackTrace()) {
+        stream.println("    " + frame.toString());
       }
-      stream.flush();
     }
+    stream.flush();
+  }
     
-    private static long previousLogTime = 0;
+  private static long previousLogTime = 0;
     
-    /**
-     * Log the current thread stacks at INFO level.
-     * @param log the logger that logs the stack trace
-     * @param title a descriptive title for the call stacks
-     * @param minInterval the minimum interval, in seconds, since the last log
-     */
-    public static synchronized void logThreadInfo(Log log,
-                                                  String title,
-                                                  long minInterval) {
-      if (log.isInfoEnabled()) {
-        long now = System.currentTimeMillis();
-        if (now - previousLogTime >= minInterval * 1000) {
-          previousLogTime = now;
-          ByteArrayOutputStream buffer = new ByteArrayOutputStream();
-          printThreadInfo(new PrintWriter(buffer), title);
-          log.info(buffer.toString());
-        }
+  /**
+   * Log the current thread stacks at INFO level.
+   * @param log the logger that logs the stack trace
+   * @param title a descriptive title for the call stacks
+   * @param minInterval the minimum interval, in seconds, since the last log
+   */
+  public static synchronized void logThreadInfo(Log log,
+                                                String title,
+                                                long minInterval) {
+    if (log.isInfoEnabled()) {
+      long now = System.currentTimeMillis();
+      if (now - previousLogTime >= minInterval * 1000) {
+        previousLogTime = now;
+        ByteArrayOutputStream buffer = new ByteArrayOutputStream();
+        printThreadInfo(new PrintWriter(buffer), title);
+        log.info(buffer.toString());
       }
     }
+  }
 
-    // methods to support testing
-    static void clearCache() {
-      CONSTRUCTOR_CACHE.clear();
-    }
-    
-    static int getCacheSize() {
-      return CONSTRUCTOR_CACHE.size();
-    }
+  // methods to support testing
+  static void clearCache() {
+    CONSTRUCTOR_CACHE.clear();
+  }
+    
+  static int getCacheSize() {
+    return CONSTRUCTOR_CACHE.size();
+  }
 
 }

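Aside: newInstance() above hinges on a ConcurrentHashMap of no-arg constructors: the reflective lookup happens once per class, setAccessible(true) admits non-public constructors, and later instantiations reuse the cached Constructor. The cache in isolation (CtorCacheDemo is hypothetical):

import java.lang.reflect.Constructor;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;

// Standalone version of the constructor cache in ReflectionUtils.newInstance();
// CtorCacheDemo is hypothetical.
public class CtorCacheDemo {
  private static final Map<Class<?>, Constructor<?>> CACHE =
    new ConcurrentHashMap<Class<?>, Constructor<?>>();

  static Object newInstance(Class<?> theClass) {
    try {
      Constructor<?> meth = CACHE.get(theClass);
      if (meth == null) {
        // The reflective lookup is the expensive part; do it once.
        meth = theClass.getDeclaredConstructor(new Class[] {});
        meth.setAccessible(true);   // admit non-public constructors
        CACHE.put(theClass, meth);
      }
      return meth.newInstance();
    } catch (Exception e) {
      throw new RuntimeException(e);
    }
  }

  public static void main(String[] args) {
    System.out.println(newInstance(java.util.ArrayList.class).getClass());
    // prints: class java.util.ArrayList
  }
}
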
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/StringUtils.java Mon Apr 16 14:44:35 2007
@@ -135,7 +135,7 @@
     StringBuffer retString = new StringBuffer();
     for (int i = 0; i < bytes.length; ++i) {
       retString.append(Integer.toHexString(0x0100 + (bytes[i] & 0x00FF))
-          .substring(1));
+                       .substring(1));
     }
     return retString.toString();
   }
@@ -241,7 +241,7 @@
    * @return formatted value. 
    */
   public static String getFormattedTimeWithDiff(DateFormat dateFormat, 
-      long finishTime, long startTime){
+                                                long finishTime, long startTime){
     StringBuffer buf = new StringBuffer();
     if( 0 != finishTime ) {
       buf.append(dateFormat.format(new Date(finishTime)));
@@ -267,4 +267,4 @@
     }
     return (String[])values.toArray(new String[values.size()]);
   }
-}
\ No newline at end of file
+}

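Aside: the byteToHexString hunk above relies on a padding trick: adding 0x0100 to the masked byte forces Integer.toHexString() to emit three digits, and substring(1) drops the leading '1', leaving exactly two. Worked through for one byte (HexDemo is hypothetical):

// Worked example of the two-digit-hex trick in StringUtils.byteToHexString();
// HexDemo is hypothetical.
public class HexDemo {
  public static void main(String[] args) {
    byte b = 0x0a;
    // Without the trick: "a" (one digit, which would misalign the output).
    System.out.println(Integer.toHexString(b & 0x00FF));
    // 0x0100 + 0x0a = 0x10a -> "10a"; substring(1) -> "0a".
    System.out.println(Integer.toHexString(0x0100 + (b & 0x00FF)).substring(1));
  }
}
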
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/Tool.java Mon Apr 16 14:44:35 2007
@@ -27,11 +27,11 @@
  *
  */
 public interface Tool extends Configurable {
-    /**
-     * execute the command with the given arguments
-     * @param args command specific arguments
-     * @return exit code
-     * @throws Exception
-     */
-    int run( String [] args ) throws Exception;
+  /**
+   * execute the command with the given arguments
+   * @param args command specific arguments
+   * @return exit code
+   * @throws Exception
+   */
+  int run( String [] args ) throws Exception;
 }

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/ToolBase.java Mon Apr 16 14:44:35 2007
@@ -82,111 +82,111 @@
  *
  */
 public abstract class ToolBase implements Tool {
-    private static final Log LOG = LogFactory.getLog(
-            "org.apache.hadoop.util.ToolBase");
-    public Configuration conf;
+  private static final Log LOG = LogFactory.getLog(
+                                                   "org.apache.hadoop.util.ToolBase");
+  public Configuration conf;
 
-    public void setConf(Configuration conf) {
-        this.conf = conf;
-    }
+  public void setConf(Configuration conf) {
+    this.conf = conf;
+  }
 
-    public Configuration getConf() {
-        return conf;
-    }
+  public Configuration getConf() {
+    return conf;
+  }
     
-    /*
-     * Specify properties of each generic option
-     */
-    static private Options buildGeneralOptions() {
-        Option fs = OptionBuilder.withArgName("local|namenode:port")
-                                 .hasArg()
-                                 .withDescription("specify a namenode")
-                                 .create("fs");
-        Option jt = OptionBuilder.withArgName("local|jobtracker:port")
-                                 .hasArg()
-                                 .withDescription("specify a job tracker")
-                                 .create("jt");
-        Option oconf = OptionBuilder.withArgName("configuration file")
-                .hasArg()
-                .withDescription("specify an application configuration file" )
-                .create("conf");
-        Option property = OptionBuilder.withArgName("property=value")
-                              .hasArgs()
-                              .withArgPattern("=", 1)
-                              .withDescription("use value for given property")
-                              .create('D');
-        Options opts = new Options();
-        opts.addOption(fs);
-        opts.addOption(jt);
-        opts.addOption(oconf);
-        opts.addOption(property);
+  /*
+   * Specify properties of each generic option
+   */
+  static private Options buildGeneralOptions() {
+    Option fs = OptionBuilder.withArgName("local|namenode:port")
+      .hasArg()
+      .withDescription("specify a namenode")
+      .create("fs");
+    Option jt = OptionBuilder.withArgName("local|jobtracker:port")
+      .hasArg()
+      .withDescription("specify a job tracker")
+      .create("jt");
+    Option oconf = OptionBuilder.withArgName("configuration file")
+      .hasArg()
+      .withDescription("specify an application configuration file" )
+      .create("conf");
+    Option property = OptionBuilder.withArgName("property=value")
+      .hasArgs()
+      .withArgPattern("=", 1)
+      .withDescription("use value for given property")
+      .create('D');
+    Options opts = new Options();
+    opts.addOption(fs);
+    opts.addOption(jt);
+    opts.addOption(oconf);
+    opts.addOption(property);
         
-        return opts;
-    }
+    return opts;
+  }
     
-    /*
-     * Modify configuration according to user-specified generic options
-     * @param conf Configuration to be modified
-     * @param line User-specified generic options
-     */
-    static private void processGeneralOptions( Configuration conf,
-                                               CommandLine line ) {
-        if(line.hasOption("fs")) {
-            conf.set("fs.default.name", line.getOptionValue("fs"));
-        }
+  /*
+   * Modify configuration according to user-specified generic options
+   * @param conf Configuration to be modified
+   * @param line User-specified generic options
+   */
+  static private void processGeneralOptions( Configuration conf,
+                                             CommandLine line ) {
+    if(line.hasOption("fs")) {
+      conf.set("fs.default.name", line.getOptionValue("fs"));
+    }
         
-        if(line.hasOption("jt")) {
-            conf.set("mapred.job.tracker", line.getOptionValue("jt"));
-        }
-        if(line.hasOption("conf")) {
-            conf.addFinalResource(new Path(line.getOptionValue("conf")));
-        }
-        if(line.hasOption('D')) {
-            String[] property = line.getOptionValues('D');
-            for(int i=0; i<property.length-1; i=i+2) {
-                if(property[i]!=null)
-                    conf.set(property[i], property[i+1]);
-            }
-         }           
+    if(line.hasOption("jt")) {
+      conf.set("mapred.job.tracker", line.getOptionValue("jt"));
+    }
+    if(line.hasOption("conf")) {
+      conf.addFinalResource(new Path(line.getOptionValue("conf")));
     }
+    if(line.hasOption('D')) {
+      String[] property = line.getOptionValues('D');
+      for(int i=0; i<property.length-1; i=i+2) {
+        if(property[i]!=null)
+          conf.set(property[i], property[i+1]);
+      }
+    }           
+  }
  
-    /**
-     * Parse the user-specified options, get the generic options, and modify
-     * configuration accordingly
-     * @param conf Configuration to be modified
-     * @param args User-specified arguments
-     * @return Command-specific arguments
-     */
-    static private String[] parseGeneralOptions( Configuration conf, 
-                 String[] args ) {
-        Options opts = buildGeneralOptions();
-        CommandLineParser parser = new GnuParser();
-        try {
-          CommandLine line = parser.parse( opts, args, true );
-          processGeneralOptions( conf, line );
-          return line.getArgs();
-        } catch(ParseException e) {
-          LOG.warn("options parsing failed: "+e.getMessage());
+  /**
+   * Parse the user-specified options, get the generic options, and modify
+   * configuration accordingly
+   * @param conf Configuration to be modified
+   * @param args User-specified arguments
+   * @return Command-specific arguments
+   */
+  static private String[] parseGeneralOptions( Configuration conf, 
+                                               String[] args ) {
+    Options opts = buildGeneralOptions();
+    CommandLineParser parser = new GnuParser();
+    try {
+      CommandLine line = parser.parse( opts, args, true );
+      processGeneralOptions( conf, line );
+      return line.getArgs();
+    } catch(ParseException e) {
+      LOG.warn("options parsing failed: "+e.getMessage());
 
-          HelpFormatter formatter = new HelpFormatter();
-          formatter.printHelp("general options are: ", opts);
-        }
-        return args;
+      HelpFormatter formatter = new HelpFormatter();
+      formatter.printHelp("general options are: ", opts);
     }
+    return args;
+  }
 
-    /**
-     * Work as a main program: execute a command and handle exceptions, if any
-     * @param conf Application default configuration
-     * @param args User-specified arguments
-     * @throws Exception
-     * @return exit code to be passed to a caller. General contract is that code
-     * equal zero signifies a normal return, negative values signify errors, and
-     * positive non-zero values can be used to return application-specific codes.
-     */
-    public final int doMain(Configuration conf, String[] args) throws Exception {
-        String [] commandOptions = parseGeneralOptions(conf, args);
-        setConf(conf);
-        return this.run(commandOptions);
-    }
+  /**
+   * Works as a main program: executes a command and handles any exception.
+   * @param conf Application default configuration
+   * @param args User-specified arguments
+   * @throws Exception
+   * @return exit code to be passed to a caller. The general contract is that a
+   * code equal to zero signifies a normal return, negative values signify errors,
+   * and positive non-zero values can be used to return application-specific codes.
+   */
+  public final int doMain(Configuration conf, String[] args) throws Exception {
+    String [] commandOptions = parseGeneralOptions(conf, args);
+    setConf(conf);
+    return this.run(commandOptions);
+  }
 
 }
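
The generic options handled above (-fs, -jt, -conf and -D) let any command
built on this class override its Configuration from the command line before
run() is invoked. A minimal sketch of such a command, assuming the enclosing
class here is org.apache.hadoop.util.ToolBase and that it exposes the parsed
Configuration through a protected conf field (both assumptions, since the
file header for this hunk falls outside this part of the diff):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.ToolBase;

    public class EchoFsTool extends ToolBase {   // hypothetical tool
      public int run(String[] args) throws Exception {
        // args holds only the command-specific arguments; -fs/-jt/-conf/-D
        // have already been folded into the configuration by doMain().
        System.out.println("fs.default.name = " + conf.get("fs.default.name"));
        return 0;                                // zero signifies a normal return
      }
      public static void main(String[] args) throws Exception {
        System.exit(new EchoFsTool().doMain(new Configuration(), args));
      }
    }

Invoked as, e.g., "EchoFsTool -fs local -D a=b", doMain() strips the generic
options and passes only the remainder to run().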

Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/XMLUtils.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/XMLUtils.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/XMLUtils.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/XMLUtils.java Mon Apr 16 14:44:35 2007
@@ -38,17 +38,17 @@
    * @throws TransformerException
    */
   public static void transform(
-          InputStream styleSheet, InputStream xml, Writer out
-          ) 
-  throws TransformerConfigurationException, TransformerException {
+                               InputStream styleSheet, InputStream xml, Writer out
+                               ) 
+    throws TransformerConfigurationException, TransformerException {
     // Instantiate a TransformerFactory
     TransformerFactory tFactory = TransformerFactory.newInstance();
 
     // Use the TransformerFactory to process the  
     // stylesheet and generate a Transformer
     Transformer transformer = tFactory.newTransformer(
-                                  new StreamSource(styleSheet)
-                                );
+                                                      new StreamSource(styleSheet)
+                                                      );
 
     // Use the Transformer to transform an XML Source 
     // and send the output to a Result object.
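
A minimal caller for the transform() helper above, assuming two local files
and an output writer (the file names are illustrative only):

    import java.io.FileInputStream;
    import java.io.FileWriter;
    import java.io.Writer;
    import org.apache.hadoop.util.XMLUtils;

    public class TransformDemo {
      public static void main(String[] args) throws Exception {
        FileInputStream style = new FileInputStream("report.xsl"); // stylesheet
        FileInputStream xml = new FileInputStream("report.xml");   // input document
        Writer out = new FileWriter("report.html");                // transformed output
        try {
          XMLUtils.transform(style, xml, out);
        } finally {
          out.close();
          xml.close();
          style.close();
        }
      }
    }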

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java Mon Apr 16 14:44:35 2007
@@ -44,7 +44,7 @@
  */
 public class ClusterTestDFSNamespaceLogging extends TestCase implements FSConstants {
   private static final Log LOG =
-      LogFactory.getLog("org.apache.hadoop.dfs.ClusterTestDFS");
+    LogFactory.getLog("org.apache.hadoop.dfs.ClusterTestDFS");
 
   private static Configuration conf = new Configuration();
 
@@ -89,10 +89,10 @@
     conf.setBoolean("test.dfs.same.host.targets.allowed", true);
   }
 
- /**
-  * Remove old files from temp area used by this test case and be sure
-  * base temp directory can be created.
-  */
+  /**
+   * Removes old files from the temp area used by this test case and ensures
+   * that the base temp directory can be created.
+   */
   protected void prepareTempFileSpace() {
     if (baseDir.exists()) {
       try { // start from a blank state
@@ -103,8 +103,8 @@
     baseDir.mkdirs();
     if (!baseDir.isDirectory()) {
       throw new RuntimeException("Value of root directory property" 
-          + "test.dfs.data for dfs test is not a directory: "
-          + baseDirSpecified);
+                                 + " test.dfs.data for dfs test is not a directory: "
+                                 + baseDirSpecified);
     }
   }
 
@@ -115,8 +115,8 @@
    * @throws Exception
    */
   public void testFsPseudoDistributed() throws Exception {
-	  // test on a small cluster with 3 data nodes
-	  testFsPseudoDistributed(3);
+    // test on a small cluster with 3 data nodes
+    testFsPseudoDistributed(3);
   }
   
   private void testFsPseudoDistributed( int datanodeNum ) throws Exception {
@@ -224,11 +224,11 @@
     byte[] buffer = new byte[BUFFER_SIZE];
     UTF8 testFileName = new UTF8(filename); // hardcode filename
     OutputStream nos;
-	nos = dfsClient.create(testFileName, false);
+    nos = dfsClient.create(testFileName, false);
     try {
       for (long nBytesWritten = 0L;
-                nBytesWritten < fileSize;
-                nBytesWritten += buffer.length) {
+           nBytesWritten < fileSize;
+           nBytesWritten += buffer.length) {
         if ((nBytesWritten + buffer.length) > fileSize) {
           int pb = (int) (fileSize - nBytesWritten);
           byte[] bufferPartial = new byte[pb];
@@ -250,95 +250,95 @@
   }
 
   private void assertMkdirs( String fileName, boolean failed ) {
-	  assertHasLogged("NameNode.mkdirs: " +fileName, DIR_LOG_HEADER_LEN+1);
-	  assertHasLogged("NameSystem.mkdirs: "+fileName, DIR_LOG_HEADER_LEN);
-	  if( failed )
-		assertHasLogged("FSDirectory.mkdirs: "
-        			+"failed to create directory "+fileName, DIR_LOG_HEADER_LEN);
-	  else
-	    assertHasLogged( "FSDirectory.mkdirs: created directory "+fileName, DIR_LOG_HEADER_LEN);
+    assertHasLogged("NameNode.mkdirs: " +fileName, DIR_LOG_HEADER_LEN+1);
+    assertHasLogged("NameSystem.mkdirs: "+fileName, DIR_LOG_HEADER_LEN);
+    if( failed )
+      assertHasLogged("FSDirectory.mkdirs: "
+                      +"failed to create directory "+fileName, DIR_LOG_HEADER_LEN);
+    else
+      assertHasLogged( "FSDirectory.mkdirs: created directory "+fileName, DIR_LOG_HEADER_LEN);
   }
   
   private void assertCreate( String fileName, int filesize, boolean failed ) {
-	  assertHasLogged("NameNode.create: file "+fileName, DIR_LOG_HEADER_LEN+1);
-	  assertHasLogged("NameSystem.startFile: file "+fileName, DIR_LOG_HEADER_LEN);
-	  if( failed ) {
-		assertHasLogged("NameSystem.startFile: "
-            		  +"failed to create file " + fileName, DIR_LOG_HEADER_LEN);
-	  } else {
-	    assertHasLogged("NameSystem.allocateBlock: "+fileName, BLOCK_LOG_HEADER_LEN);
-	    int blockNum = (filesize/BLOCK_SIZE*BLOCK_SIZE==filesize)?
-		  filesize/BLOCK_SIZE : 1+filesize/BLOCK_SIZE;
-	    for( int i=1; i<blockNum; i++) {
-		  assertHasLogged("NameNode.addBlock: file "+fileName, BLOCK_LOG_HEADER_LEN+1);
-		  assertHasLogged("NameSystem.getAdditionalBlock: file "+fileName, BLOCK_LOG_HEADER_LEN);
-		  assertHasLogged("NameSystem.allocateBlock: "+fileName, BLOCK_LOG_HEADER_LEN);
-	    }
-	    assertHasLogged("NameNode.complete: "+fileName, DIR_LOG_HEADER_LEN+1);
-	    assertHasLogged("NameSystem.completeFile: "+fileName, DIR_LOG_HEADER_LEN);
-	    assertHasLogged("FSDirectory.addFile: "+fileName+" with "
-			  +blockNum+" blocks is added to the file system", DIR_LOG_HEADER_LEN);
-	    assertHasLogged("NameSystem.completeFile: "+fileName
-			  +" is removed from pendingCreates", DIR_LOG_HEADER_LEN);
-	  }
+    assertHasLogged("NameNode.create: file "+fileName, DIR_LOG_HEADER_LEN+1);
+    assertHasLogged("NameSystem.startFile: file "+fileName, DIR_LOG_HEADER_LEN);
+    if( failed ) {
+      assertHasLogged("NameSystem.startFile: "
+                      +"failed to create file " + fileName, DIR_LOG_HEADER_LEN);
+    } else {
+      assertHasLogged("NameSystem.allocateBlock: "+fileName, BLOCK_LOG_HEADER_LEN);
+      int blockNum = (filesize/BLOCK_SIZE*BLOCK_SIZE==filesize)?
+        filesize/BLOCK_SIZE : 1+filesize/BLOCK_SIZE;
+      for( int i=1; i<blockNum; i++) {
+        assertHasLogged("NameNode.addBlock: file "+fileName, BLOCK_LOG_HEADER_LEN+1);
+        assertHasLogged("NameSystem.getAdditionalBlock: file "+fileName, BLOCK_LOG_HEADER_LEN);
+        assertHasLogged("NameSystem.allocateBlock: "+fileName, BLOCK_LOG_HEADER_LEN);
+      }
+      assertHasLogged("NameNode.complete: "+fileName, DIR_LOG_HEADER_LEN+1);
+      assertHasLogged("NameSystem.completeFile: "+fileName, DIR_LOG_HEADER_LEN);
+      assertHasLogged("FSDirectory.addFile: "+fileName+" with "
+                      +blockNum+" blocks is added to the file system", DIR_LOG_HEADER_LEN);
+      assertHasLogged("NameSystem.completeFile: "+fileName
+                      +" is removed from pendingCreates", DIR_LOG_HEADER_LEN);
+    }
   }
   
   private void assertDelete( String fileName, boolean failed ) {
-	  assertHasLogged("NameNode.delete: "+fileName, DIR_LOG_HEADER_LEN+1);
-      assertHasLogged("NameSystem.delete: "+fileName, DIR_LOG_HEADER_LEN);
-      assertHasLogged("FSDirectory.delete: "+fileName, DIR_LOG_HEADER_LEN);
-      if( failed )
-        assertHasLogged("FSDirectory.unprotectedDelete: "
-            +"failed to remove "+fileName, DIR_LOG_HEADER_LEN );
-      else
-        assertHasLogged("FSDirectory.unprotectedDelete: "
-            +fileName+" is removed", DIR_LOG_HEADER_LEN);
+    assertHasLogged("NameNode.delete: "+fileName, DIR_LOG_HEADER_LEN+1);
+    assertHasLogged("NameSystem.delete: "+fileName, DIR_LOG_HEADER_LEN);
+    assertHasLogged("FSDirectory.delete: "+fileName, DIR_LOG_HEADER_LEN);
+    if( failed )
+      assertHasLogged("FSDirectory.unprotectedDelete: "
+                      +"failed to remove "+fileName, DIR_LOG_HEADER_LEN );
+    else
+      assertHasLogged("FSDirectory.unprotectedDelete: "
+                      +fileName+" is removed", DIR_LOG_HEADER_LEN);
   }
   
   private void assertRename( String src, String dst, boolean failed ) {
-	  assertHasLogged("NameNode.rename: "+src+" to "+dst, DIR_LOG_HEADER_LEN+1);
-	  assertHasLogged("NameSystem.renameTo: "+src+" to "+dst, DIR_LOG_HEADER_LEN );
-	  assertHasLogged("FSDirectory.renameTo: "+src+" to "+dst, DIR_LOG_HEADER_LEN );
-	  if( failed )
-		assertHasLogged("FSDirectory.unprotectedRenameTo: "
-                         +"failed to rename "+src+" to "+dst, DIR_LOG_HEADER_LEN);
-	  else
-	    assertHasLogged("FSDirectory.unprotectedRenameTo: "
-                       +src+" is renamed to "+dst, DIR_LOG_HEADER_LEN );
+    assertHasLogged("NameNode.rename: "+src+" to "+dst, DIR_LOG_HEADER_LEN+1);
+    assertHasLogged("NameSystem.renameTo: "+src+" to "+dst, DIR_LOG_HEADER_LEN );
+    assertHasLogged("FSDirectory.renameTo: "+src+" to "+dst, DIR_LOG_HEADER_LEN );
+    if( failed )
+      assertHasLogged("FSDirectory.unprotectedRenameTo: "
+                      +"failed to rename "+src+" to "+dst, DIR_LOG_HEADER_LEN);
+    else
+      assertHasLogged("FSDirectory.unprotectedRenameTo: "
+                      +src+" is renamed to "+dst, DIR_LOG_HEADER_LEN );
   }
   
   private void assertHasLogged( String target, int headerLen ) {
-	  String line;
-	  boolean notFound = true;
-	  try {
-	      while( notFound && (line=logfh.readLine()) != null ) {
-		      if(line.length()>headerLen && line.startsWith(target, headerLen))
-			      notFound = false;
-	      }
-	  } catch(java.io.IOException e) {
-		  throw new AssertionFailedError("error reading the log file");
-	  }
-	  if(notFound) {
-		  throw new AssertionFailedError(target+" not logged");
-	  }
+    String line;
+    boolean notFound = true;
+    try {
+      while( notFound && (line=logfh.readLine()) != null ) {
+        if(line.length()>headerLen && line.startsWith(target, headerLen))
+          notFound = false;
+      }
+    } catch(java.io.IOException e) {
+      throw new AssertionFailedError("error reading the log file");
+    }
+    if(notFound) {
+      throw new AssertionFailedError(target+" not logged");
+    }
   }
 
   //
   //     modify config for test
   //
   private void configureDFS() throws IOException {
-	// set given config param to override other config settings
-	conf.setInt("dfs.block.size", BLOCK_SIZE);
-	// verify that config changed
-	assertTrue(BLOCK_SIZE == conf.getInt("dfs.block.size", 2)); // 2 is an intentional obviously-wrong block size
-	// downsize for testing (just to save resources)
-	conf.setInt("dfs.namenode.handler.count", 3);
-	conf.setLong("dfs.blockreport.intervalMsec", 50*1000L);
-	conf.setLong("dfs.datanode.startupMsec", 15*1000L);
-	conf.setInt("dfs.replication", 2);
-	System.setProperty("hadoop.log.dir", baseDirSpecified+"/logs");
-	conf.setInt("hadoop.logfile.count", 1);
-	conf.setInt("hadoop.logfile.size", 1000000000);
+    // set given config param to override other config settings
+    conf.setInt("dfs.block.size", BLOCK_SIZE);
+    // verify that config changed
+    assertTrue(BLOCK_SIZE == conf.getInt("dfs.block.size", 2)); // 2 is an intentional obviously-wrong block size
+    // downsize for testing (just to save resources)
+    conf.setInt("dfs.namenode.handler.count", 3);
+    conf.setLong("dfs.blockreport.intervalMsec", 50*1000L);
+    conf.setLong("dfs.datanode.startupMsec", 15*1000L);
+    conf.setInt("dfs.replication", 2);
+    System.setProperty("hadoop.log.dir", baseDirSpecified+"/logs");
+    conf.setInt("hadoop.logfile.count", 1);
+    conf.setInt("hadoop.logfile.size", 1000000000);
   }
   
   private void startDFS( int dataNodeNum) throws IOException {
@@ -348,73 +348,73 @@
     conf.set("fs.default.name", nameNodeSocketAddr);
     
     String nameFSDir = baseDirSpecified + "/name";
-	conf.set("dfs.name.dir", nameFSDir);
+    conf.set("dfs.name.dir", nameFSDir);
 	
     NameNode.format(conf);
     
     nameNodeDaemon = new NameNode("localhost", nameNodePort, conf);
 
-     //
-      //        start DataNodes
-      //
-      for (int i = 0; i < dataNodeNum; i++) {
-        // uniquely config real fs path for data storage for this datanode
-        String dataDir[] = new String[1];
-        dataDir[0] = baseDirSpecified + "/datanode" + i;
-        conf.set("dfs.data.dir", dataDir[0]);
-        DataNode dn = DataNode.makeInstance(dataDir, conf);
-        if (dn != null) {
-          dataNodeDaemons.add(dn);
-          (new Thread(dn, "DataNode" + i + ": " + dataDir[0])).start();
-        }
+    //
+    //        start DataNodes
+    //
+    for (int i = 0; i < dataNodeNum; i++) {
+      // uniquely config real fs path for data storage for this datanode
+      String dataDir[] = new String[1];
+      dataDir[0] = baseDirSpecified + "/datanode" + i;
+      conf.set("dfs.data.dir", dataDir[0]);
+      DataNode dn = DataNode.makeInstance(dataDir, conf);
+      if (dn != null) {
+        dataNodeDaemons.add(dn);
+        (new Thread(dn, "DataNode" + i + ": " + dataDir[0])).start();
       }
+    }
 	         
-      assertTrue("incorrect datanodes for test to continue",
-            (dataNodeDaemons.size() == dataNodeNum));
-      //
-      //          wait for datanodes to report in
-      try {
-        awaitQuiescence();
-      } catch( InterruptedException e) {
-    	  e.printStackTrace();
-      }
+    assertTrue("incorrect datanodes for test to continue",
+               (dataNodeDaemons.size() == dataNodeNum));
+    //
+    //          wait for datanodes to report in
+    try {
+      awaitQuiescence();
+    } catch( InterruptedException e) {
+      e.printStackTrace();
+    }
       
-      //  act as if namenode is a remote process
-      dfsClient = new DFSClient(new InetSocketAddress("localhost", nameNodePort), conf);
+    //  act as if namenode is a remote process
+    dfsClient = new DFSClient(new InetSocketAddress("localhost", nameNodePort), conf);
   }
 
   private void shutdownDFS() {
-      // shutdown client
-      if (dfsClient != null) {
-        try {
-          msg("close down subthreads of DFSClient");
-          dfsClient.close();
-        } catch (Exception ignored) { }
-        msg("finished close down of DFSClient");
-      }
+    // shutdown client
+    if (dfsClient != null) {
+      try {
+        msg("close down subthreads of DFSClient");
+        dfsClient.close();
+      } catch (Exception ignored) { }
+      msg("finished close down of DFSClient");
+    }
 
-      //
-      // shut down datanode daemons (this takes advantage of being same-process)
-      msg("begin shutdown of all datanode daemons" );
+    //
+    // shut down datanode daemons (this takes advantage of being same-process)
+    msg("begin shutdown of all datanode daemons" );
 
-      for (int i = 0; i < dataNodeDaemons.size(); i++) {
-        DataNode dataNode = (DataNode) dataNodeDaemons.get(i);
-        try {
-          dataNode.shutdown();
-        } catch (Exception e) {
-           msg("ignoring exception during (all) datanode shutdown, e=" + e);
-        }
-      }
-      msg("finished shutdown of all datanode daemons");
-      
-      // shutdown namenode
-      msg("begin shutdown of namenode daemon");
+    for (int i = 0; i < dataNodeDaemons.size(); i++) {
+      DataNode dataNode = (DataNode) dataNodeDaemons.get(i);
       try {
-        nameNodeDaemon.stop();
+        dataNode.shutdown();
       } catch (Exception e) {
-        msg("ignoring namenode shutdown exception=" + e);
+        msg("ignoring exception during (all) datanode shutdown, e=" + e);
       }
-      msg("finished shutdown of namenode daemon");
+    }
+    msg("finished shutdown of all datanode daemons");
+      
+    // shutdown namenode
+    msg("begin shutdown of namenode daemon");
+    try {
+      nameNodeDaemon.stop();
+    } catch (Exception e) {
+      msg("ignoring namenode shutdown exception=" + e);
+    }
+    msg("finished shutdown of namenode daemon");
   }
   
   /** Wait for the DFS datanodes to become quiescent.
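
The assertHasLogged() helper above scans the test's log file for a message
whose body starts at a fixed column, skipping the log4j header. The same idea,
isolated as a self-contained sketch (the header length of 24 is an assumed
placeholder; the test derives DIR_LOG_HEADER_LEN and BLOCK_LOG_HEADER_LEN from
its own log layout):

    import java.io.BufferedReader;
    import java.io.FileReader;
    import java.io.IOException;

    public class LogScan {
      static final int HEADER_LEN = 24; // hypothetical log4j header width

      static boolean hasLogged(String logFile, String target) throws IOException {
        BufferedReader in = new BufferedReader(new FileReader(logFile));
        try {
          String line;
          while ((line = in.readLine()) != null) {
            // Match only when the message body, after the header, starts with target.
            if (line.length() > HEADER_LEN && line.startsWith(target, HEADER_LEN)) {
              return true;
            }
          }
        } finally {
          in.close();
        }
        return false;
      }
    }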

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/DFSTestUtil.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/DFSTestUtil.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/DFSTestUtil.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/DFSTestUtil.java Mon Apr 16 14:44:35 2007
@@ -36,7 +36,7 @@
   
   private static Random gen = new Random();
   private static String[] dirNames = {
-      "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"
+    "zero", "one", "two", "three", "four", "five", "six", "seven", "eight", "nine"
   };
   private static Configuration conf = new Configuration();
   

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/MiniDFSCluster.java Mon Apr 16 14:44:35 2007
@@ -110,8 +110,8 @@
     conf.set("fs.default.name", "localhost:"+ Integer.toString(nameNodePort));
     conf.setInt("dfs.info.port", 0);
     if (manageDfsDirs) {
-    conf.set("dfs.name.dir", new File(base_dir, "name1").getPath()+","+
-        new File(base_dir, "name2").getPath());
+      conf.set("dfs.name.dir", new File(base_dir, "name1").getPath()+","+
+               new File(base_dir, "name2").getPath());
     }
     conf.setInt("dfs.replication", Math.min(3, numDataNodes));
     conf.setInt("dfs.safemode.extension", 0);
@@ -126,9 +126,9 @@
     
     // Start the NameNode
     String[] args = (operation == null ||
-      operation == StartupOption.FORMAT ||
-      operation == StartupOption.REGULAR) ?
-        new String[] {} : new String[] {"-"+operation.toString()};
+                     operation == StartupOption.FORMAT ||
+                     operation == StartupOption.REGULAR) ?
+      new String[] {} : new String[] {"-"+operation.toString()};
     nameNode = NameNode.createNameNode(args, conf);
     
     // Start the DataNodes
@@ -172,12 +172,12 @@
     InetSocketAddress nnAddr = nameNode.getNameNodeAddress(); 
     int nameNodePort = nnAddr.getPort(); 
     conf.set("fs.default.name", 
-      nnAddr.getHostName()+ ":" + Integer.toString(nameNodePort));
+             nnAddr.getHostName()+ ":" + Integer.toString(nameNodePort));
     
     String[] args = (operation == null ||
-      operation == StartupOption.FORMAT ||
-      operation == StartupOption.REGULAR) ?
-        new String[] {} : new String[] {"-"+operation.toString()};
+                     operation == StartupOption.FORMAT ||
+                     operation == StartupOption.REGULAR) ?
+      new String[] {} : new String[] {"-"+operation.toString()};
         
     for (int i = 0; i < numDataNodes; i++) {
       Configuration dnConf = new Configuration(conf);
@@ -188,7 +188,7 @@
         dir2.mkdirs();
         if (!dir1.isDirectory() || !dir2.isDirectory()) { 
           throw new IOException("Mkdirs failed to create directory for DataNode "
-            + i + ": " + dir1 + " or " + dir2);
+                                + i + ": " + dir1 + " or " + dir2);
         }
         dnConf.set("dfs.data.dir", dir1.getPath() + "," + dir2.getPath()); 
       }
@@ -196,7 +196,7 @@
         dnConf.set("dfs.datanode.rack", racks[i]);
       }
       System.out.println("Starting DataNode " + i + " with dfs.data.dir: " 
-        + dnConf.get("dfs.data.dir"));
+                         + dnConf.get("dfs.data.dir"));
       dataNodes.add(DataNode.createDataNode(args, dnConf));
     }
   }
@@ -211,7 +211,7 @@
   public void finalizeCluster(Configuration conf) throws Exception {
     if (nameNode == null) {
       throw new IllegalStateException("Attempting to finalize "
-        + "Namenode but it is not running");
+                                      + "Namenode but it is not running");
     }
     new DFSAdmin().doMain(conf, new String[] {"-finalizeUpgrade"});
   }
@@ -311,7 +311,7 @@
    */
   public void waitActive() throws IOException {
     InetSocketAddress addr = new InetSocketAddress("localhost",
-                                             getNameNodePort());
+                                                   getNameNodePort());
     DFSClient client = new DFSClient(addr, conf);
 
     //
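
waitActive() above blocks until the datanodes have reported in, so a test can
safely touch the file system right after constructing the cluster. A rough
usage sketch (the four-argument constructor shown here is an assumption, as
the constructors fall outside this part of the diff):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.dfs.MiniDFSCluster;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class MiniClusterDemo {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        MiniDFSCluster cluster = new MiniDFSCluster(conf, 2, true, null); // assumed signature
        try {
          cluster.waitActive();                 // wait for the datanodes to register
          FileSystem fs = FileSystem.get(conf); // fs.default.name was set by the cluster
          fs.mkdirs(new Path("/demo"));
        } finally {
          cluster.shutdown();
        }
      }
    }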

Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/NNBench.java
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/NNBench.java?view=diff&rev=529410&r1=529409&r2=529410
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/NNBench.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/NNBench.java Mon Apr 16 14:44:35 2007
@@ -40,143 +40,143 @@
  * @author Nigel Daley
  */
 public class NNBench {
-    // variable initialzed from command line arguments
-    private static long startTime = 0;
-    private static int numFiles = 0;
-    private static long bytesPerBlock = 1;
-    private static long blocksPerFile = 0;
-    private static long bytesPerFile = 1;
-    private static Path baseDir = null;
-    
-    // variables initialized in main()
-    private static FileSystem fileSys = null;
-    private static Path taskDir = null;
-    private static String uniqueId = null;
-    private static byte[] buffer;
-    
-    /**
-     * Returns when the current number of seconds from the epoch equals
-     * the command line argument given by <code>-startTime</code>.
-     * This allows multiple instances of this program, running on clock
-     * synchronized nodes, to start at roughly the same time.
-     */
-    static void barrier() {
-      long sleepTime;
-      while ((sleepTime = startTime - System.currentTimeMillis()) > 0) {
+  // variables initialized from command-line arguments
+  private static long startTime = 0;
+  private static int numFiles = 0;
+  private static long bytesPerBlock = 1;
+  private static long blocksPerFile = 0;
+  private static long bytesPerFile = 1;
+  private static Path baseDir = null;
+    
+  // variables initialized in main()
+  private static FileSystem fileSys = null;
+  private static Path taskDir = null;
+  private static String uniqueId = null;
+  private static byte[] buffer;
+    
+  /**
+   * Returns when the current time (in milliseconds from the epoch) reaches
+   * the command line argument given by <code>-startTime</code>.
+   * This allows multiple instances of this program, running on clock
+   * synchronized nodes, to start at roughly the same time.
+   */
+  static void barrier() {
+    long sleepTime;
+    while ((sleepTime = startTime - System.currentTimeMillis()) > 0) {
+      try {
+        Thread.sleep(sleepTime);
+      } catch (InterruptedException ex) {
+      }
+    }
+  }
+    
+  /**
+   * Create and write to a given number of files.  Repeat each remote
+   * operation until it succeeds (does not throw an exception).
+   *
+   * @return the number of exceptions caught
+   */
+  static int createWrite() {
+    int exceptions = 0;
+    FSDataOutputStream out = null;
+    boolean success = false;
+    for (int index = 0; index < numFiles; index++) {
+      do { // create file until it succeeds
         try {
-          Thread.sleep(sleepTime);
-        } catch (InterruptedException ex) {
+          out = fileSys.create(
+                               new Path(taskDir, "" + index), false, 512, (short)1, bytesPerBlock);
+          success = true;
+        } catch (IOException ioe) { success=false; exceptions++; }
+      } while (!success);
+      long toBeWritten = bytesPerFile;
+      while (toBeWritten > 0) {
+        int nbytes = (int) Math.min(buffer.length, toBeWritten);
+        toBeWritten -= nbytes;
+        try { // only try once
+          out.write(buffer, 0, nbytes);
+        } catch (IOException ioe) {
+          exceptions++;
         }
       }
+      do { // close file until it succeeds
+        try {
+          out.close();
+          success = true;
+        } catch (IOException ioe) { success=false; exceptions++; }
+      } while (!success);
     }
+    return exceptions;
+  }
     
-    /**
-     * Create and write to a given number of files.  Repeat each remote
-     * operation until is suceeds (does not throw an exception).
-     *
-     * @return the number of exceptions caught
-     */
-    static int createWrite() {
-      int exceptions = 0;
-      FSDataOutputStream out = null;
-      boolean success = false;
-      for (int index = 0; index < numFiles; index++) {
-        do { // create file until is succeeds
-          try {
-              out = fileSys.create(
-              new Path(taskDir, "" + index), false, 512, (short)1, bytesPerBlock);
-            success = true;
-          } catch (IOException ioe) { success=false; exceptions++; }
-        } while (!success);
-        long toBeWritten = bytesPerFile;
-        while (toBeWritten > 0) {
-          int nbytes = (int) Math.min(buffer.length, toBeWritten);
-          toBeWritten -= nbytes;
+  /**
+   * Open and read a given number of files.
+   *
+   * @return the number of exceptions caught
+   */
+  static int openRead() {
+    int exceptions = 0;
+    FSDataInputStream in = null;
+    for (int index = 0; index < numFiles; index++) {
+      try {
+        in = fileSys.open(new Path(taskDir, "" + index), 512);
+        long toBeRead = bytesPerFile;
+        while (toBeRead > 0) {
+          int nbytes = (int) Math.min(buffer.length, toBeRead);
+          toBeRead -= nbytes;
           try { // only try once
-            out.write(buffer, 0, nbytes);
+            in.read(buffer, 0, nbytes);
           } catch (IOException ioe) {
             exceptions++;
           }
         }
-        do { // close file until is succeeds
-          try {
-            out.close();
-            success = true;
-          } catch (IOException ioe) { success=false; exceptions++; }
-        } while (!success);
+        in.close();
+      } catch (IOException ioe) { 
+        exceptions++; 
       }
-      return exceptions;
     }
+    return exceptions;
+  }
     
-    /**
-     * Open and read a given number of files.
-     *
-     * @return the number of exceptions caught
-     */
-    static int openRead() {
-      int exceptions = 0;
-      FSDataInputStream in = null;
-      for (int index = 0; index < numFiles; index++) {
+  /**
+   * Rename a given number of files.  Repeat each remote
+   * operation until it succeeds (does not throw an exception).
+   *
+   * @return the number of exceptions caught
+   */
+  static int rename() {
+    int exceptions = 0;
+    boolean success = false;
+    for (int index = 0; index < numFiles; index++) {
+      do { // rename file until it succeeds
         try {
-          in = fileSys.open(new Path(taskDir, "" + index), 512);
-          long toBeRead = bytesPerFile;
-          while (toBeRead > 0) {
-            int nbytes = (int) Math.min(buffer.length, toBeRead);
-            toBeRead -= nbytes;
-            try { // only try once
-              in.read(buffer, 0, nbytes);
-            } catch (IOException ioe) {
-              exceptions++;
-            }
-          }
-          in.close();
-        } catch (IOException ioe) { 
-          exceptions++; 
-        }
-      }
-      return exceptions;
+          boolean result = fileSys.rename(
+                                          new Path(taskDir, "" + index), new Path(taskDir, "A" + index));
+          success = true;
+        } catch (IOException ioe) { success=false; exceptions++; }
+      } while (!success);
     }
+    return exceptions;
+  }
     
-    /**
-     * Rename a given number of files.  Repeat each remote
-     * operation until is suceeds (does not throw an exception).
-     *
-     * @return the number of exceptions caught
-     */
-    static int rename() {
-      int exceptions = 0;
-      boolean success = false;
-      for (int index = 0; index < numFiles; index++) {
-        do { // rename file until is succeeds
-          try {
-            boolean result = fileSys.rename(
-              new Path(taskDir, "" + index), new Path(taskDir, "A" + index));
-            success = true;
-          } catch (IOException ioe) { success=false; exceptions++; }
-        } while (!success);
-      }
-      return exceptions;
-    }
-    
-    /**
-     * Delete a given number of files.  Repeat each remote
-     * operation until is suceeds (does not throw an exception).
-     *
-     * @return the number of exceptions caught
-     */
-    static int delete() {
-      int exceptions = 0;
-      boolean success = false;
-      for (int index = 0; index < numFiles; index++) {
-        do { // delete file until is succeeds
-          try {
-            boolean result = fileSys.delete(new Path(taskDir, "A" + index));
-            success = true;
-          } catch (IOException ioe) { success=false; exceptions++; }
-        } while (!success);
-      }
-      return exceptions;
+  /**
+   * Delete a given number of files.  Repeat each remote
+   * operation until it succeeds (does not throw an exception).
+   *
+   * @return the number of exceptions caught
+   */
+  static int delete() {
+    int exceptions = 0;
+    boolean success = false;
+    for (int index = 0; index < numFiles; index++) {
+      do { // delete file until it succeeds
+        try {
+          boolean result = fileSys.delete(new Path(taskDir, "A" + index));
+          success = true;
+        } catch (IOException ioe) { success=false; exceptions++; }
+      } while (!success);
     }
+    return exceptions;
+  }
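
Each of createWrite(), rename() and delete() above wraps a single namenode
operation in the same retry-until-success loop, counting exceptions as it
goes. Factored out as a standalone sketch (the RemoteOp interface is
introduced here purely for illustration):

    import java.io.IOException;

    public class RetryLoop {
      interface RemoteOp { void run() throws IOException; } // hypothetical callback

      static int retryUntilSuccess(RemoteOp op) {
        int exceptions = 0;
        boolean success = false;
        do {
          try {
            op.run();
            success = true;
          } catch (IOException ioe) {
            success = false;
            exceptions++;   // count the failure, then retry the operation
          }
        } while (!success);
        return exceptions;  // matches the methods' "number of exceptions" contract
      }
    }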
     
   /**
    * This launches a given namenode operation (<code>-operation</code>),
@@ -241,14 +241,14 @@
     System.out.println("   bytesPerBlock: " + bytesPerBlock);
     
     if (operation == null ||  // verify args
-      baseDir == null ||
-      numFiles < 1 ||
-      blocksPerFile < 1 ||
-      bytesPerBlock < 0) 
-    {
-      System.err.println(usage);
-      System.exit(-1);
-    }
+        baseDir == null ||
+        numFiles < 1 ||
+        blocksPerFile < 1 ||
+        bytesPerBlock < 0) 
+      {
+        System.err.println(usage);
+        System.exit(-1);
+      }
     
     JobConf jobConf = new JobConf(new Configuration(), NNBench.class);
     fileSys = FileSystem.get(jobConf);