Posted to common-commits@hadoop.apache.org by bo...@apache.org on 2012/05/08 15:20:57 UTC

svn commit: r1335505 - in /hadoop/common/trunk/hadoop-tools: hadoop-archives/src/main/java/org/apache/hadoop/tools/ hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-extras/src/main/java/org/apache/hadoop/tools/ hadoop-rumen/ hadoop-rumen/dev...

Author: bobby
Date: Tue May  8 13:20:56 2012
New Revision: 1335505

URL: http://svn.apache.org/viewvc?rev=1335505&view=rev
Log:
HADOOP-8341. Fix or filter findbugs issues in hadoop-tools (bobby)

Added:
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml
    hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/
    hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml
Modified:
    hadoop/common/trunk/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
    hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/pom.xml
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java
    hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
    hadoop/common/trunk/hadoop-tools/hadoop-streaming/pom.xml
    hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java

Modified: hadoop/common/trunk/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-archives/src/main/java/org/apache/hadoop/tools/HadoopArchives.java Tue May  8 13:20:56 2012
@@ -117,7 +117,7 @@ public class HadoopArchives implements T
     // will when running the mapreduce job.
     String testJar = System.getProperty(TEST_HADOOP_ARCHIVES_JAR_PATH, null);
     if (testJar != null) {
-      ((JobConf)conf).setJar(testJar);
+      this.conf.setJar(testJar);
     }
   }
 

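The hunk above drops the ((JobConf) conf) downcast by calling setJar through the field's own type, which is the usual way to clear a FindBugs cast warning (likely BC_UNCONFIRMED_CAST). A minimal sketch of the idiom, assuming a hypothetical ArchiveToolSketch class; only JobConf.setJar comes from the diff:

    import org.apache.hadoop.mapred.JobConf;

    public class ArchiveToolSketch {
      // Keep the field typed as JobConf so later calls need no downcast.
      private final JobConf conf;

      public ArchiveToolSketch(JobConf conf) {
        this.conf = conf;
      }

      void maybeSetTestJar(String testJar) {
        if (testJar != null) {
          conf.setJar(testJar);  // no ((JobConf) conf) cast, nothing to flag
        }
      }
    }
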
Modified: hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Tue May  8 13:20:56 2012
@@ -136,10 +136,13 @@ public class DistCp extends Configured i
 
     Job job = null;
     try {
-      metaFolder = createMetaFolderPath();
-      jobFS = metaFolder.getFileSystem(getConf());
+      synchronized(this) {
+        //Don't cleanup while we are setting up.
+        metaFolder = createMetaFolderPath();
+        jobFS = metaFolder.getFileSystem(getConf());
 
-      job = createJob();
+        job = createJob();
+      }
       createInputFileListing(job);
 
       job.submit();

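Wrapping the metafolder and job setup in synchronized(this) pairs it with the cleanup path that takes the same lock, the usual fix for FindBugs' inconsistent-synchronization report (IS2_INCONSISTENT_SYNC). A minimal sketch of the idiom with a hypothetical JobRunner class; the real DistCp fields and cleanup logic are more involved:

    public class JobRunner {
      private Object metaFolder;  // stands in for the Path / FileSystem state
      private Object job;

      public void execute() {
        synchronized (this) {
          // Don't clean up while we are setting up.
          metaFolder = createMetaFolder();
          job = createJob();
        }
        submit(job);
      }

      public synchronized void cleanup() {
        // Reads and writes the same fields under the same lock, so the
        // fields are no longer accessed with inconsistent synchronization.
        metaFolder = null;
        job = null;
      }

      private Object createMetaFolder() { return new Object(); }
      private Object createJob() { return new Object(); }
      private void submit(Object j) { /* submission happens elsewhere */ }
    }
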
Modified: hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/tools/Logalyzer.java Tue May  8 13:20:56 2012
@@ -65,9 +65,9 @@ import org.apache.hadoop.mapreduce.lib.m
 public class Logalyzer {
   // Constants
   private static Configuration fsConfig = new Configuration();
-  public static String SORT_COLUMNS = 
+  public static final String SORT_COLUMNS = 
     "logalizer.logcomparator.sort.columns";
-  public static String COLUMN_SEPARATOR = 
+  public static final String COLUMN_SEPARATOR = 
     "logalizer.logcomparator.column.separator";
   
   static {

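Making the two configuration keys final turns them into real constants, which clears FindBugs' warning about non-final public static fields (MS_SHOULD_BE_FINAL). A minimal sketch of the constant idiom with hypothetical names:

    public final class LogSettingsSketch {
      // public static fields that are not final can be reassigned by any
      // caller; declaring them final makes the keys true constants.
      public static final String SORT_COLUMNS =
          "example.logcomparator.sort.columns";
      public static final String COLUMN_SEPARATOR =
          "example.logcomparator.column.separator";

      private LogSettingsSketch() { }  // constants only, no instances
    }
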
Added: hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml?rev=1335505&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml (added)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/dev-support/findbugs-exclude.xml Tue May  8 13:20:56 2012
@@ -0,0 +1,31 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.LoggedJob"/>
+    <Method name="getMapperTriesToSucceed"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+  <And>
+    <Class name="org.apache.hadoop.tools.rumen.ZombieJob"/>
+    <Method name="getInputSplits"/>
+    <Bug pattern="EI_EXPOSE_REP"/>
+    <Bug code="EI"/>
+  </And>
+</FindBugsFilter>

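EI_EXPOSE_REP is FindBugs' warning for getters that hand out a reference to internal mutable state; the filter above suppresses it for two Rumen getters where that sharing is accepted rather than fixed with a copy. A minimal sketch of the kind of getter the pattern flags, using a hypothetical TraceSketch class:

    import java.util.ArrayList;
    import java.util.List;

    public class TraceSketch {
      private final List<String> splits = new ArrayList<String>();

      // Returns the internal list by reference, so callers could mutate it;
      // FindBugs reports EI_EXPOSE_REP on getters shaped like this one.
      public List<String> getSplits() {
        return splits;
      }
    }
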
Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/pom.xml?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/pom.xml (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/pom.xml Tue May  8 13:20:56 2012
@@ -91,6 +91,16 @@
   <build>
     <plugins>
       <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+         <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
+      <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>

Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/DeskewedJobTraceReader.java Tue May  8 13:20:56 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 
 import java.io.Closeable;
 import java.io.IOException;
+import java.io.Serializable;
 import java.util.Comparator;
 import java.util.Iterator;
 import java.util.PriorityQueue;
@@ -59,7 +60,8 @@ public class DeskewedJobTraceReader impl
   static final private Log LOG =
       LogFactory.getLog(DeskewedJobTraceReader.class);
 
-  static private class JobComparator implements Comparator<LoggedJob> {
+  static private class JobComparator implements Comparator<LoggedJob>, 
+  Serializable {
     @Override
     public int compare(LoggedJob j1, LoggedJob j2) {
       return (j1.getSubmitTime() < j2.getSubmitTime()) ? -1 : (j1

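Comparators held by serializable collections such as TreeMap are serialized along with the data, so FindBugs asks every Comparator to implement Serializable (SE_COMPARATOR_SHOULD_BE_SERIALIZABLE); this comparator and the two that follow (LoggedNetworkTopology and TraceBuilder) get the same one-line fix. A minimal sketch with hypothetical names:

    import java.io.Serializable;
    import java.util.Comparator;

    class SubmitTimeComparator implements Comparator<Long>, Serializable {
      private static final long serialVersionUID = 1L;

      @Override
      public int compare(Long t1, Long t2) {
        // The ordering logic is unchanged; only the Serializable marker
        // (and, ideally, a serialVersionUID) is added.
        return t1.compareTo(t2);
      }
    }
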
Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/JobConfPropertyNames.java Tue May  8 13:20:56 2012
@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.util.Arrays;
+
 import org.apache.hadoop.mapreduce.MRJobConfig;
 
 public enum JobConfPropertyNames {
@@ -33,6 +35,6 @@ public enum JobConfPropertyNames {
   }
 
   public String[] getCandidates() {
-    return candidates;
+    return Arrays.copyOf(candidates, candidates.length);
   }
 }

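Returning Arrays.copyOf(...) instead of the field itself is the standard fix for EI_EXPOSE_REP when the array should stay private: callers get a snapshot they can modify freely without touching the internal state. A minimal sketch of the defensive-copy idiom with a hypothetical PropertyNamesSketch class and example values:

    import java.util.Arrays;

    public class PropertyNamesSketch {
      private final String[] candidates = { "mapreduce.job.name", "user.name" };

      // Hand out a copy, not the internal array; mutating the returned
      // array no longer affects this object.
      public String[] getCandidates() {
        return Arrays.copyOf(candidates, candidates.length);
      }
    }
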
Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedNetworkTopology.java Tue May  8 13:20:56 2012
@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tools.rumen;
 
+import java.io.Serializable;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.HashSet;
@@ -68,7 +69,8 @@ public class LoggedNetworkTopology imple
    * order.
    * 
    */
-  static class TopoSort implements Comparator<LoggedNetworkTopology> {
+  static class TopoSort implements Comparator<LoggedNetworkTopology>, 
+  Serializable {
     public int compare(LoggedNetworkTopology t1, LoggedNetworkTopology t2) {
       return t1.name.getValue().compareTo(t2.name.getValue());
     }

Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/TraceBuilder.java Tue May  8 13:20:56 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.tools.rumen;
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.io.InputStream;
+import java.io.Serializable;
 import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.Comparator;
@@ -98,7 +99,7 @@ public class TraceBuilder extends Config
      * history file names should result in the order of jobs' submission times.
      */
     private static class HistoryLogsComparator
-        implements Comparator<FileStatus> {
+        implements Comparator<FileStatus>, Serializable {
       @Override
       public int compare(FileStatus file1, FileStatus file2) {
         return file1.getPath().getName().compareTo(

Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/anonymization/WordListAnonymizerUtility.java Tue May  8 13:20:56 2012
@@ -27,7 +27,7 @@ import org.apache.commons.lang.StringUti
  * //TODO There is no caching for saving memory.
  */
 public class WordListAnonymizerUtility {
-  public static final String[] KNOWN_WORDS = 
+  static final String[] KNOWN_WORDS = 
     new String[] {"job", "tmp", "temp", "home", "homes", "usr", "user", "test"};
   
   /**

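A public static final String[] is still mutable: final only pins the reference, while the elements can be overwritten by any caller. FindBugs flags that as MS_MUTABLE_ARRAY, and the change narrows the field to package-private rather than copying it on every access. A minimal sketch of the problem, with a hypothetical WordsSketch class:

    public class WordsSketch {
      // final stops "KNOWN_WORDS = otherArray", but not element writes such
      // as KNOWN_WORDS[0] = "oops"; keeping the field package-private limits
      // who can do that to classes in the same package.
      static final String[] KNOWN_WORDS = { "job", "tmp", "home", "user" };
    }
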
Modified: hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/NodeName.java Tue May  8 13:20:56 2012
@@ -93,16 +93,8 @@ public class NodeName implements Anonymi
   }
   
   public NodeName(String rName, String hName) {
-    rName = (rName == null) 
-            ? rName 
-            : rName.length() == 0 
-              ? null 
-              : rName;
-    hName = (hName == null) 
-            ? hName 
-            : hName.length() == 0 
-              ? null 
-              : hName;
+    rName = (rName == null || rName.length() == 0) ? null : rName;
+    hName = (hName == null || hName.length() == 0) ? null : hName;
     if (hName == null) {
       nodeName = rName;
       rackName = rName;

Added: hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml?rev=1335505&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml (added)
+++ hadoop/common/trunk/hadoop-tools/hadoop-streaming/dev-support/findbugs-exclude.xml Tue May  8 13:20:56 2012
@@ -0,0 +1,30 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<FindBugsFilter>
+  <Match>
+    <Or>
+      <Class name="org.apache.hadoop.streaming.PipeMapper" />
+      <Class name="org.apache.hadoop.streaming.PipeReducer"/>
+    </Or>
+    <Or>
+      <Method name="getFieldSeparator"/>
+      <Method name="getInputSeparator"/>
+    </Or>
+    <Bug pattern="EI_EXPOSE_REP"/>
+  </Match>
+</FindBugsFilter>

Modified: hadoop/common/trunk/hadoop-tools/hadoop-streaming/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-streaming/pom.xml?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-streaming/pom.xml (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-streaming/pom.xml Tue May  8 13:20:56 2012
@@ -96,6 +96,16 @@
 
   <build>
     <plugins>
+       <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>findbugs-maven-plugin</artifactId>
+         <configuration>
+          <findbugsXmlOutput>true</findbugsXmlOutput>
+          <xmlOutput>true</xmlOutput>
+          <excludeFilterFile>${basedir}/dev-support/findbugs-exclude.xml</excludeFilterFile>
+          <effort>Max</effort>
+        </configuration>
+      </plugin>
       <plugin>
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>

Modified: hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java?rev=1335505&r1=1335504&r2=1335505&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/common/trunk/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java Tue May  8 13:20:56 2012
@@ -91,7 +91,7 @@ public class StreamJob implements Tool {
   @Deprecated
   public StreamJob(String[] argv, boolean mayExit) {
     this();
-    argv_ = argv;
+    argv_ = Arrays.copyOf(argv, argv.length);
     this.config_ = new Configuration();
   }
 
@@ -113,7 +113,7 @@ public class StreamJob implements Tool {
   @Override
   public int run(String[] args) throws Exception {
     try {
-      this.argv_ = args;
+      this.argv_ = Arrays.copyOf(args, args.length);
       init();
 
       preProcessArgs();
@@ -290,7 +290,7 @@ public class StreamJob implements Tool {
         LOG.warn("-file option is deprecated, please use generic option" +
         		" -files instead.");
 
-        String fileList = null;
+        StringBuffer fileList = new StringBuffer();
         for (String file : values) {
           packageFiles_.add(file);
           try {
@@ -298,13 +298,15 @@ public class StreamJob implements Tool {
             Path path = new Path(pathURI);
             FileSystem localFs = FileSystem.getLocal(config_);
             String finalPath = path.makeQualified(localFs).toString();
-            fileList = fileList == null ? finalPath : fileList + "," + finalPath;
+            if(fileList.length() > 0) {
+              fileList.append(',');
+            }
+            fileList.append(finalPath);
           } catch (Exception e) {
             throw new IllegalArgumentException(e);
           }
         }
-        config_.set("tmpfiles", config_.get("tmpfiles", "") +
-                                  (fileList == null ? "" : fileList));
+        config_.set("tmpfiles", config_.get("tmpfiles", "") + fileList);
         validate(packageFiles_);
       }
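The last hunk replaces repeated String concatenation in a loop with a single StringBuffer, the fix FindBugs suggests for its string-concatenation-in-loop warning (SBSC_USE_STRINGBUFFER_CONCATENATION), and the Arrays.copyOf calls on argv address storing a caller-supplied array by reference (likely EI_EXPOSE_REP2). A minimal sketch of the accumulation idiom with a hypothetical joinPaths helper; StringBuilder behaves like StringBuffer when no locking is needed:

    public class FileListSketch {
      static String joinPaths(String[] paths) {
        StringBuilder fileList = new StringBuilder();
        for (String path : paths) {
          if (fileList.length() > 0) {
            fileList.append(',');  // separator only between entries
          }
          fileList.append(path);
        }
        // One String is built at the end instead of a new one per iteration.
        return fileList.toString();
      }

      public static void main(String[] args) {
        System.out.println(joinPaths(new String[] { "a.txt", "b.txt" }));
      }
    }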