Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/05/08 23:58:03 UTC

svn commit: r1335791 - in /hadoop/common/branches/HDFS-3092: ./ hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-extras/src/main/java/org/apache/ha...

Author: szetszwo
Date: Tue May  8 21:57:58 2012
New Revision: 1335791

URL: http://svn.apache.org/viewvc?rev=1335791&view=rev
Log:
Merge r1334158 through r1335790 from trunk.

Added:
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
      - copied unchanged from r1335790, hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCpV1.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/dev-support/
      - copied from r1335790, hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml
      - copied unchanged from r1335790, hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/dev-support/
      - copied from r1335790, hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml
      - copied unchanged from r1335790, hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml
Removed:
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/DistCp.java
Modified:
    hadoop/common/branches/HDFS-3092/   (props changed)
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/pom.xml
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/pom.xml
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

Propchange: hadoop/common/branches/HDFS-3092/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1334158-1335790

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java Tue May  8 21:57:58 2012
@@ -117,7 +117,7 @@ public class HadoopArchives implements T
     // will when running the mapreduce job.
     String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
     if (testJar != null) {
-      ((JobConf)conf).setJar(testJar);
+      this.conf.setJar(testJar);
     }
   }
 

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Tue May  8 21:57:58 2012
@@ -136,10 +136,13 @@ public class DistCp extends Configured i
 
     Job job = null;
     try {
-      metaFolder = createMetaFolderPath();
-      jobFS = metaFolder.getFileSystem(getConf());
+      synchronized(this) {
+        //Don't cleanup while we are setting up.
+        metaFolder = createMetaFolderPath();
+        jobFS = metaFolder.getFileSystem(getConf());
 
-      job = createJob();
+        job = createJob();
+      }
       createInputFileListing(job);
 
       job.submit();

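A note on the DistCp hunk above: the new synchronized block keeps the cleanup path from tearing down the metafolder while it and the job are still being set up (per the committed comment). A minimal, self-contained sketch of that guard pattern, using hypothetical names (GuardedSetupSketch, setUp, cleanUp) rather than the real DistCp internals:

import java.nio.file.Files;
import java.nio.file.Path;

public class GuardedSetupSketch {
  private Path metaFolder;  // hypothetical stand-in for DistCp's metaFolder

  public synchronized void setUp() throws Exception {
    // Don't cleanup while we are setting up (mirrors the committed comment).
    metaFolder = Files.createTempDirectory("meta");
  }

  public synchronized void cleanUp() throws Exception {
    // Holds the same lock as setUp(), so it never sees a half-built state.
    if (metaFolder != null) {
      Files.deleteIfExists(metaFolder);
      metaFolder = null;
    }
  }

  public static void main(String[] args) throws Exception {
    final GuardedSetupSketch job = new GuardedSetupSketch();
    // Cleanup may arrive asynchronously, e.g. from a shutdown hook.
    Runtime.getRuntime().addShutdownHook(new Thread() {
      public void run() {
        try {
          job.cleanUp();
        } catch (Exception ignored) {
        }
      }
    });
    job.setUp();
    job.cleanUp();
  }
}
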
Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java Tue May  8 21:57:58 2012
@@ -65,9 +65,9 @@ import org.apache.hadoop.mapreduce.lib.m
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
-  public static String SORT_COLUMNS = 
+  public static final String SORT_COLUMNS = 
     "logalizer.logcomparator.sort.columns";
-  public static String COLUMN_SEPARATOR = 
+  public static final String COLUMN_SEPARATOR = 
     "logalizer.logcomparator.column.separator";
   
   static {
@@ -194,7 +194,7 @@ public class Logalyzer {
     throws IOException
   {
     String destURL = FileSystem.getDefaultUri(fsConfig) + archiveDirectory;
-    DistCp.copy(new JobConf(fsConfig), logListURI, destURL, null, true, false);
+    DistCpV1.copy(new JobConf(fsConfig), logListURI, destURL, null, true, false);
   }
   
   /**

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java Tue May  8 21:57:58 2012
@@ -48,7 +48,7 @@ import org.apache.hadoop.hdfs.server.nam
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.security.UserGroupInformation;
-import org.apache.hadoop.tools.DistCp;
+import org.apache.hadoop.tools.DistCpV1;
 import org.apache.hadoop.util.ToolRunner;
 import org.apache.log4j.Level;
 import org.junit.Ignore;
@@ -64,7 +64,7 @@ public class TestCopyFiles extends TestC
         ).getLogger().setLevel(Level.OFF);
     ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
     ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger)DistCp.LOG).getLogger().setLevel(Level.ALL);
+    ((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL);
   }
   
   static final URI LOCAL_FS = URI.create("file:///");
@@ -267,7 +267,7 @@ public class TestCopyFiles extends TestC
     Configuration conf = new Configuration();
     FileSystem localfs = FileSystem.get(LOCAL_FS, conf);
     MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
-    ToolRunner.run(new DistCp(new Configuration()),
+    ToolRunner.run(new DistCpV1(new Configuration()),
                            new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                                          "file:///"+TEST_ROOT_DIR+"/destdat"});
     assertTrue("Source and destination directories do not match.",
@@ -287,7 +287,7 @@ public class TestCopyFiles extends TestC
       namenode = FileSystem.getDefaultUri(conf).toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-log",
                                          namenode+"/logs",
                                          namenode+"/srcdat",
@@ -320,7 +320,7 @@ public class TestCopyFiles extends TestC
         FileSystem fs = FileSystem.get(URI.create(namenode), new Configuration());
         fs.mkdirs(new Path("/empty"));
 
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-log",
                                          namenode+"/logs",
                                          namenode+"/empty",
@@ -347,7 +347,7 @@ public class TestCopyFiles extends TestC
       final String namenode = hdfs.getUri().toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(LOCAL_FS, TEST_ROOT_DIR+"/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-log",
                                          namenode+"/logs",
                                          "file:///"+TEST_ROOT_DIR+"/srcdat",
@@ -376,7 +376,7 @@ public class TestCopyFiles extends TestC
       final String namenode = FileSystem.getDefaultUri(conf).toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-log",
                                          "/logs",
                                          namenode+"/srcdat",
@@ -403,7 +403,7 @@ public class TestCopyFiles extends TestC
       final String namenode = hdfs.getUri().toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-p",
                                          "-log",
                                          namenode+"/logs",
@@ -420,7 +420,7 @@ public class TestCopyFiles extends TestC
         updateFiles(cluster.getFileSystem(), "/srcdat", files, nupdate);
         deldir(hdfs, "/logs");
 
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-prbugp", // no t to avoid preserving mod. times
                                          "-update",
                                          "-log",
@@ -433,7 +433,7 @@ public class TestCopyFiles extends TestC
                  checkUpdate(hdfs, dchkpoint, "/destdat", files, nupdate));
 
         deldir(hdfs, "/logs");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-prbugp", // no t to avoid preserving mod. times
                                          "-overwrite",
                                          "-log",
@@ -483,7 +483,7 @@ public class TestCopyFiles extends TestC
         out.close();
         
         // Run with -skipcrccheck option
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
           "-p",
           "-update",
           "-skipcrccheck",
@@ -503,7 +503,7 @@ public class TestCopyFiles extends TestC
         deldir(hdfs, "/logs");
 
         // Run without the option        
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
           "-p",
           "-update",
           "-log",
@@ -533,14 +533,14 @@ public class TestCopyFiles extends TestC
     final FileSystem localfs = FileSystem.get(LOCAL_FS, new Configuration());
     try {    
       MyFile[] files = createFiles(localfs, TEST_ROOT_DIR+"/srcdat");
-      ToolRunner.run(new DistCp(new Configuration()),
+      ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                         "file:///"+TEST_ROOT_DIR+"/src2/srcdat"});
       assertTrue("Source and destination directories do not match.",
                  checkFiles(localfs, TEST_ROOT_DIR+"/src2/srcdat", files));
   
-      assertEquals(DistCp.DuplicationException.ERROR_CODE,
-          ToolRunner.run(new DistCp(new Configuration()),
+      assertEquals(DistCpV1.DuplicationException.ERROR_CODE,
+          ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                         "file:///"+TEST_ROOT_DIR+"/src2/srcdat",
                         "file:///"+TEST_ROOT_DIR+"/destdat",}));
@@ -558,7 +558,7 @@ public class TestCopyFiles extends TestC
     try {    
       MyFile[] files = {createFile(root, fs)};
       //copy a dir with a single file
-      ToolRunner.run(new DistCp(new Configuration()),
+      ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat",
                         "file:///"+TEST_ROOT_DIR+"/destdat"});
       assertTrue("Source and destination directories do not match.",
@@ -568,7 +568,7 @@ public class TestCopyFiles extends TestC
       String fname = files[0].getName();
       Path p = new Path(root, fname);
       FileSystem.LOG.info("fname=" + fname + ", exists? " + fs.exists(p));
-      ToolRunner.run(new DistCp(new Configuration()),
+      ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
                         "file:///"+TEST_ROOT_DIR+"/dest2/"+fname});
       assertTrue("Source and destination directories do not match.",
@@ -578,17 +578,17 @@ public class TestCopyFiles extends TestC
       String[] args = {"-update", "file:///"+TEST_ROOT_DIR+"/srcdat/"+fname,
           "file:///"+TEST_ROOT_DIR+"/dest2/"+fname};
       Configuration conf = new Configuration();
-      JobConf job = new JobConf(conf, DistCp.class);
-      DistCp.Arguments distcpArgs = DistCp.Arguments.valueOf(args, conf);
+      JobConf job = new JobConf(conf, DistCpV1.class);
+      DistCpV1.Arguments distcpArgs = DistCpV1.Arguments.valueOf(args, conf);
       assertFalse("Single file update failed to skip copying even though the " 
-          + "file exists at destination.", DistCp.setup(conf, job, distcpArgs));
+          + "file exists at destination.", DistCpV1.setup(conf, job, distcpArgs));
       
       //copy single file to existing dir
       deldir(fs, TEST_ROOT_DIR+"/dest2");
       fs.mkdirs(new Path(TEST_ROOT_DIR+"/dest2"));
       MyFile[] files2 = {createFile(root, fs, 0)};
       String sname = files2[0].getName();
-      ToolRunner.run(new DistCp(new Configuration()),
+      ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"-update",
                         "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                         "file:///"+TEST_ROOT_DIR+"/dest2/"});
@@ -596,7 +596,7 @@ public class TestCopyFiles extends TestC
           checkFiles(fs, TEST_ROOT_DIR+"/dest2", files2));     
       updateFiles(fs, TEST_ROOT_DIR+"/srcdat", files2, 1);
       //copy single file to existing dir w/ dst name conflict
-      ToolRunner.run(new DistCp(new Configuration()),
+      ToolRunner.run(new DistCpV1(new Configuration()),
           new String[] {"-update",
                         "file:///"+TEST_ROOT_DIR+"/srcdat/"+sname,
                         "file:///"+TEST_ROOT_DIR+"/dest2/"});
@@ -621,7 +621,7 @@ public class TestCopyFiles extends TestC
       namenode = FileSystem.getDefaultUri(conf).toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(URI.create(namenode), "/basedir/middle/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-basedir",
                                          "/basedir",
                                          namenode+"/basedir/middle/srcdat",
@@ -651,7 +651,7 @@ public class TestCopyFiles extends TestC
         for(int i = 0; i < srcstat.length; i++) {
           fs.setOwner(srcstat[i].getPath(), "u" + i, null);
         }
-        ToolRunner.run(new DistCp(conf),
+        ToolRunner.run(new DistCpV1(conf),
             new String[]{"-pu", nnUri+"/srcdat", nnUri+"/destdat"});
         assertTrue("Source and destination directories do not match.",
                    checkFiles(fs, "/destdat", files));
@@ -670,7 +670,7 @@ public class TestCopyFiles extends TestC
         for(int i = 0; i < srcstat.length; i++) {
           fs.setOwner(srcstat[i].getPath(), null, "g" + i);
         }
-        ToolRunner.run(new DistCp(conf),
+        ToolRunner.run(new DistCpV1(conf),
             new String[]{"-pg", nnUri+"/srcdat", nnUri+"/destdat"});
         assertTrue("Source and destination directories do not match.",
                    checkFiles(fs, "/destdat", files));
@@ -692,7 +692,7 @@ public class TestCopyFiles extends TestC
           fs.setPermission(srcstat[i].getPath(), permissions[i]);
         }
 
-        ToolRunner.run(new DistCp(conf),
+        ToolRunner.run(new DistCpV1(conf),
             new String[]{"-pp", nnUri+"/srcdat", nnUri+"/destdat"});
         assertTrue("Source and destination directories do not match.",
                    checkFiles(fs, "/destdat", files));
@@ -715,7 +715,7 @@ public class TestCopyFiles extends TestC
           fs.setTimes(srcstat[i].getPath(), 40, 50);
         }
 
-        ToolRunner.run(new DistCp(conf),
+        ToolRunner.run(new DistCpV1(conf),
             new String[]{"-pt", nnUri+"/srcdat", nnUri+"/destdat"});
 
         FileStatus[] dststat = getFileStatus(fs, "/destdat", files);
@@ -753,7 +753,7 @@ public class TestCopyFiles extends TestC
       }
       Configuration job = mr.createJobConf();
       job.setLong("distcp.bytes.per.map", totsize / 3);
-      ToolRunner.run(new DistCp(job),
+      ToolRunner.run(new DistCpV1(job),
           new String[] {"-m", "100",
                         "-log",
                         namenode+"/logs",
@@ -771,7 +771,7 @@ public class TestCopyFiles extends TestC
 
       deldir(fs, "/destdat");
       deldir(fs, "/logs");
-      ToolRunner.run(new DistCp(job),
+      ToolRunner.run(new DistCpV1(job),
           new String[] {"-m", "1",
                         "-log",
                         namenode+"/logs",
@@ -795,7 +795,7 @@ public class TestCopyFiles extends TestC
       cluster = new MiniDFSCluster(conf, 2, true, null);
       final String nnUri = FileSystem.getDefaultUri(conf).toString();
       final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
-      final DistCp distcp = new DistCp(conf);
+      final DistCpV1 distcp = new DistCpV1(conf);
       final FsShell shell = new FsShell(conf);  
 
       final String srcrootdir =  "/src_root";
@@ -927,9 +927,9 @@ public class TestCopyFiles extends TestC
       final String srcrootdir =  srcrootpath.toString();
       final Path dstrootpath = new Path(home, "dst_root"); 
       final String dstrootdir =  dstrootpath.toString();
-      final DistCp distcp = USER_UGI.doAs(new PrivilegedExceptionAction<DistCp>() {
-        public DistCp run() {
-          return new DistCp(userConf);
+      final DistCpV1 distcp = USER_UGI.doAs(new PrivilegedExceptionAction<DistCpV1>() {
+        public DistCpV1 run() {
+          return new DistCpV1(userConf);
         }
       });
 
@@ -961,7 +961,7 @@ public class TestCopyFiles extends TestC
       final String nnUri = nnURI.toString();
       final FileSystem fs = FileSystem.get(URI.create(nnUri), conf);
 
-      final DistCp distcp = new DistCp(conf);
+      final DistCpV1 distcp = new DistCpV1(conf);
       final FsShell shell = new FsShell(conf);  
 
       final String srcrootdir = "/src_root";
@@ -1035,7 +1035,7 @@ public class TestCopyFiles extends TestC
         MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
         String destdir = TEST_ROOT_DIR + "/destdat";
         MyFile[] localFiles = createFiles(localfs, destdir);
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-delete",
                                          "-update",
                                          "-log",
@@ -1066,7 +1066,7 @@ public class TestCopyFiles extends TestC
       namenode = FileSystem.getDefaultUri(conf).toString();
       if (namenode.startsWith("hdfs://")) {
         MyFile[] files = createFiles(URI.create(namenode), "/srcdat");
-        ToolRunner.run(new DistCp(conf), new String[] {
+        ToolRunner.run(new DistCpV1(conf), new String[] {
                                          "-log",
                                          namenode+"/logs",
                                          namenode+"/srcdat/*",

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/pom.xml?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/pom.xml Tue May  8 21:57:58 2012
@@ -91,6 +91,16 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+         <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
+      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java Tue May  8 21:57:58 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.PriorityQueue;
@@ -59,7 +60,8 @@ public class DeskewedJobTraceReader impl
   static final private Log LOG =
       LogFactory.getLog(DeskewedJobTraceReader.class);
 
-  static private class JobComparator implements Comparator<LoggedJob> {
+  static private class JobComparator implements Comparator<LoggedJob>, 
+  Serializable {
     @Override
     public int compare(LoggedJob j1, LoggedJob j2) {
       return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1

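This hunk, together with the later ones in LoggedNetworkTopology and TraceBuilder, makes the comparators Serializable. FindBugs reports SE_COMPARATOR_SHOULD_BE_SERIALIZABLE for comparators that are not, because sorted collections serialize their comparator along with their elements. A minimal sketch of the failure mode, using a made-up ByLength comparator:

import java.io.ByteArrayOutputStream;
import java.io.ObjectOutputStream;
import java.io.Serializable;
import java.util.Comparator;
import java.util.TreeSet;

public class SerializableComparatorSketch {
  // Without "implements Serializable" here, writeObject(set) below throws
  // java.io.NotSerializableException -- the situation the FindBugs warning flags.
  static class ByLength implements Comparator<String>, Serializable {
    private static final long serialVersionUID = 1L;
    @Override
    public int compare(String a, String b) {
      return a.length() - b.length();
    }
  }

  public static void main(String[] args) throws Exception {
    TreeSet<String> set = new TreeSet<String>(new ByLength());
    set.add("rumen");
    set.add("topology");
    // Serializing the set also serializes its comparator.
    ObjectOutputStream out = new ObjectOutputStream(new ByteArrayOutputStream());
    out.writeObject(set);
    out.close();
  }
}
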
Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java Tue May  8 21:57:58 2012
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public enum JobConfPropertyNames {
@@ -33,6 +35,6 @@ public enum JobConfPropertyNames {
   }
 
   public String[] getCandidates() {
-    return candidates;
+    return Arrays.copyOf(candidates, candidates.length);
   }
 }

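getCandidates() now hands back a copy instead of the enum's internal array, the usual fix for FindBugs' EI_EXPOSE_REP warning. A minimal sketch of the difference, with a hypothetical class and placeholder property names:

import java.util.Arrays;

public class DefensiveCopySketch {
  // Placeholder values; the real candidates live in JobConfPropertyNames.
  private final String[] candidates = { "user.name", "some.deprecated.user.name.key" };

  public String[] getCandidates() {
    // Same fix as the hunk above: return a copy, never the internal array.
    return Arrays.copyOf(candidates, candidates.length);
  }

  public static void main(String[] args) {
    DefensiveCopySketch sketch = new DefensiveCopySketch();
    sketch.getCandidates()[0] = "corrupted";        // mutates only the returned copy
    System.out.println(sketch.getCandidates()[0]);  // still prints "user.name"
  }
}
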
Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java Tue May  8 21:57:58 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.io.Serializable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -68,7 +69,8 @@ public class LoggedNetworkTopology imple
    * order.
    * 
    */
-  static class TopoSort implements Comparator<LoggedNetworkTopology> {
+  static class TopoSort implements Comparator<LoggedNetworkTopology>, 
+  Serializable {
     public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) {
       return t1.name.getValue().compareTo(t2.name.getValue());
     }

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java Tue May  8 21:57:58 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -98,7 +99,7 @@ public class TraceBuilder extends Config
      * history file names should result in the order of jobs' submission times.
      */
     private static class HistoryLogsComparator
-        implements Comparator<FileStatus> {
+        implements Comparator<FileStatus>, Serializable {
       @Override
       public int compare(FileStatus file1, FileStatus file2) {
         return file1.getPath().getName().compareTo(

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java Tue May  8 21:57:58 2012
@@ -27,7 +27,7 @@ import org.apache.commons.lang.StringUti
  * //TODO There is no caching for saving memory.
  */
 public class WordListAnonymizerUtility {
-  public static final String[] KNOWN_WORDS = 
+  static final String[] KNOWN_WORDS = 
     new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"};
   
   /**

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java Tue May  8 21:57:58 2012
@@ -93,16 +93,8 @@ public class NodeName implements Anonymi
   }
   
   public NodeName(String rName, String hName) {
-    rName = (rName == null) 
-            ? rName 
-            : rName.length() == 0 
-              ? null 
-              : rName;
-    hName = (hName == null) 
-            ? hName 
-            : hName.length() == 0 
-              ? null 
-              : hName;
+    rName = (rName == null || rName.length() == 0) ? null : rName;
+    hName = (hName == null || hName.length() == 0) ? null : hName;
     if (hName == null) {
       nodeName = rName;
       rackName = rName;

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/pom.xml?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/pom.xml Tue May  8 21:57:58 2012
@@ -96,6 +96,16 @@
 
   <build>
     <plugins>
+       <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+         <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java?rev=1335791&r1=1335790&r2=1335791&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java Tue May  8 21:57:58 2012
@@ -91,7 +91,7 @@ public class StreamJob implements Tool {
   @Deprecated
   public StreamJob(String[] argv, boolean mayExit) {
     this();
-    argv_ = argv;
+    argv_ = Arrays.copyOf(argv, argv.length);
     this.config_ = new Configuration();
   }
 
@@ -113,7 +113,7 @@ public class StreamJob implements Tool {
   @Override
   public int run(String[] args) throws Exception {
     try {
-      this.argv_ = args;
+      this.argv_ = Arrays.copyOf(args, args.length);
       init();
 
       preProcessArgs();
@@ -290,7 +290,7 @@ public class StreamJob implements Tool {
         LOG.warn("-file option is deprecated, please use generic option" +
         		" -files instead.");
 
-        String fileList = null;
+        StringBuffer fileList = new StringBuffer();
         for (String file : values) {
           packageFiles_.add(file);
           try {
@@ -298,13 +298,15 @@ public class StreamJob implements Tool {
             Path path = new Path(pathURI);
             FileSystem localFs = FileSystem.getLocal(config_);
             String finalPath = path.makeQualified(localFs).toString();
-            fileList = fileList == null ? finalPath : fileList + "," + finalPath;
+            if(fileList.length() > 0) {
+              fileList.append(',');
+            }
+            fileList.append(finalPath);
           } catch (Exception e) {
             throw new IllegalArgumentException(e);
           }
         }
-        config_.set("tmpfiles", config_.get("tmpfiles", "") +
-                                  (fileList == null ? "" : fileList));
+        config_.set("tmpfiles", config_.get("tmpfiles", "") + fileList);
         validate(packageFiles_);
       }