Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/05/04 22:23:14 UTC

svn commit: r1334158 - in /hadoop/common/branches/HDFS-3092: ./ dev-support/ hadoop-project/ hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/ hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/ hadoop-tools/hadoop-...

Author: szetszwo
Date: Fri May  4 20:22:57 2012
New Revision: 1334158

URL: http://svn.apache.org/viewvc?rev=1334158&view=rev
Log:
Merge r1332460 through r1334157 from trunk.

Removed:
    hadoop/common/branches/HDFS-3092/dev-support/test-patch.properties
Modified:
    hadoop/common/branches/HDFS-3092/   (props changed)
    hadoop/common/branches/HDFS-3092/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
    hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
    hadoop/common/branches/HDFS-3092/pom.xml

Propchange: hadoop/common/branches/HDFS-3092/
------------------------------------------------------------------------------
  Merged /hadoop/common/trunk:r1332460-1334157

Modified: hadoop/common/branches/HDFS-3092/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/dev-support/test-patch.sh?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-3092/dev-support/test-patch.sh Fri May  4 20:22:57 2012
@@ -240,15 +240,6 @@ setup () {
       cleanupAndExit 0
     fi
   fi
-  . $BASEDIR/dev-support/test-patch.properties
-  ### exit if warnings are NOT defined in the properties file
-  if [ -z "$OK_FINDBUGS_WARNINGS" ] || [[ -z "$OK_JAVADOC_WARNINGS" ]] || [[ -z $OK_RELEASEAUDIT_WARNINGS ]]; then
-    echo "Please define the following properties in test-patch.properties file"
-	 echo  "OK_FINDBUGS_WARNINGS"
-	 echo  "OK_RELEASEAUDIT_WARNINGS"
-	 echo  "OK_JAVADOC_WARNINGS"
-    cleanupAndExit 1
-  fi
   echo ""
   echo ""
   echo "======================================================================"
@@ -389,10 +380,10 @@ checkJavadocWarnings () {
   echo ""
   echo "$MVN clean test javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1"
   if [ -d hadoop-project ]; then
-    (cd hadoop-project; $MVN install)
+    (cd hadoop-project; $MVN install > /dev/null 2>&1)
   fi
   if [ -d hadoop-common-project/hadoop-annotations ]; then  
-    (cd hadoop-common-project/hadoop-annotations; $MVN install)
+    (cd hadoop-common-project/hadoop-annotations; $MVN install > /dev/null 2>&1)
   fi
   $MVN clean test javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
   javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | $AWK '/Javadoc Warnings/,EOF' | $GREP warning | $AWK 'BEGIN {total = 0} {total += 1} END {print total}'`
@@ -400,8 +391,10 @@ checkJavadocWarnings () {
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
 
+  # There are 6 warnings caused by the use of Sun internal APIs.
+  OK_JAVADOC_WARNINGS=6;
   ### if current warnings greater than OK_JAVADOC_WARNINGS
-  if [[ $javadocWarnings -gt $OK_JAVADOC_WARNINGS ]] ; then
+  if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 javadoc.  The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
@@ -472,8 +465,8 @@ checkReleaseAuditWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1"
-  $MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
+  echo "$MVN apache-rat:check -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchReleaseAuditOutput.txt 2>&1"
+  $MVN apache-rat:check -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchReleaseAuditOutput.txt 2>&1
   find $BASEDIR -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
 
   ### Compare trunk and patch release audit warning numbers
@@ -481,12 +474,12 @@ checkReleaseAuditWarnings () {
     patchReleaseAuditWarnings=`$GREP -c '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt`
     echo ""
     echo ""
-    echo "There appear to be $OK_RELEASEAUDIT_WARNINGS release audit warnings before the patch and $patchReleaseAuditWarnings release audit warnings after applying the patch."
-    if [[ $patchReleaseAuditWarnings != "" && $OK_RELEASEAUDIT_WARNINGS != "" ]] ; then
-      if [[ $patchReleaseAuditWarnings -gt $OK_RELEASEAUDIT_WARNINGS ]] ; then
+    echo "There appear to be $patchReleaseAuditWarnings release audit warnings after applying the patch."
+    if [[ $patchReleaseAuditWarnings != "" ]] ; then
+      if [[ $patchReleaseAuditWarnings -gt 0 ]] ; then
         JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 release audit.  The applied patch generated $patchReleaseAuditWarnings release audit warnings (more than the trunk's current $OK_RELEASEAUDIT_WARNINGS warnings)."
+    -1 release audit.  The applied patch generated $patchReleaseAuditWarnings release audit warnings."
         $GREP '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt > $PATCH_DIR/patchReleaseAuditProblems.txt
         echo "Lines that start with ????? in the release audit report indicate files that do not have an Apache license header." >> $PATCH_DIR/patchReleaseAuditProblems.txt
         JIRA_COMMENT_FOOTER="Release audit warnings: $BUILD_URL/artifact/trunk/patchprocess/patchReleaseAuditProblems.txt
@@ -548,10 +541,21 @@ checkFindbugsWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$MVN clean test findbugs:findbugs -DskipTests -D${PROJECT_NAME}PatchProcess" 
-  $MVN clean test findbugs:findbugs -DskipTests -D${PROJECT_NAME}PatchProcess < /dev/null
+  
+  modules=$(findModules)
+  rc=0
+  for module in $modules;
+  do
+    cd $module
+    echo "  Running findbugs in $module"
+    module_suffix=`basename ${module}`
+    echo "$MVN clean test findbugs:findbugs -DskipTests -D${PROJECT_NAME}PatchProcess < /dev/null > $PATCH_DIR/patchFindBugsOutput${module_suffix}.txt 2>&1" 
+    $MVN clean test findbugs:findbugs -DskipTests -D${PROJECT_NAME}PatchProcess < /dev/null > $PATCH_DIR/patchFindBugsOutput${module_suffix}.txt 2>&1
+    (( rc = rc + $? ))
+    cd -
+  done
 
-  if [ $? != 0 ] ; then
+  if [ $rc != 0 ] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
     -1 findbugs.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
@@ -584,11 +588,10 @@ $JIRA_COMMENT_FOOTER"
     fi
   done
 
-  ### if current warnings greater than OK_FINDBUGS_WARNINGS
-  if [[ $findbugsWarnings -gt $OK_FINDBUGS_WARNINGS ]] ; then
+  if [[ $findbugsWarnings -gt 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 findbugs.  The patch appears to introduce `expr $(($findbugsWarnings-$OK_FINDBUGS_WARNINGS))` new Findbugs (version ${findbugs_version}) warnings."
+    -1 findbugs.  The patch appears to introduce $findbugsWarnings new Findbugs (version ${findbugs_version}) warnings."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
@@ -610,8 +613,8 @@ checkEclipseGeneration () {
   echo ""
   echo ""
 
-  echo "$MVN eclipse:eclipse -D${PROJECT_NAME}PatchProcess"
-  $MVN eclipse:eclipse -D${PROJECT_NAME}PatchProcess
+  echo "$MVN eclipse:eclipse -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchEclipseOutput.txt 2>&1"
+  $MVN eclipse:eclipse -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchEclipseOutput.txt 2>&1
   if [[ $? != 0 ]] ; then
       JIRA_COMMENT="$JIRA_COMMENT
 
@@ -639,16 +642,28 @@ runTests () {
   echo ""
   echo ""
 
-  echo "$MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess"
-  $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess
-  failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
-  # With -fn mvn always exits with a 0 exit code.  Because of this we need to
-  # find the errors instead of using the exit code.  We assume that if the build
-  # failed a -1 is already given for that case
+  failed_tests=""
+  modules=$(findModules)
+  for module in $modules;
+  do
+    cd $module
+    echo "  Running tests in $module"
+    echo "  $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess"
+    $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess
+    module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
+    # With -fn mvn always exits with a 0 exit code.  Because of this we need to
+    # find the errors instead of using the exit code.  We assume that if the build
+    # failed a -1 is already given for that case
+    if [[ -n "$module_failed_tests" ]] ; then
+      failed_tests="${failed_tests}
+${module_failed_tests}"
+    fi
+    cd -
+  done
   if [[ -n "$failed_tests" ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 core tests.  The patch failed these unit tests:
+    -1 core tests.  The patch failed these unit tests in $modules:
 $failed_tests"
     return 1
   fi
@@ -659,6 +674,51 @@ $failed_tests"
 }
 
 ###############################################################################
+# Find the maven module containing the given file.
+findModule (){
+ dir=`dirname $1`
+ while [ 1 ]
+ do
+  if [ -f "$dir/pom.xml" ]
+  then
+    echo $dir
+    return
+  else
+    dir=`dirname $dir`
+  fi
+ done
+}
+
+findModules () {
+  # Come up with a list of changed files into $TMP
+  TMP=/tmp/tmp.paths.$$
+  $GREP '^+++\|^---' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
+  
+  # if all of the lines start with a/ or b/, then this is a git patch that
+  # was generated without --no-prefix
+  if ! $GREP -qv '^a/\|^b/' $TMP ; then
+    sed -i -e 's,^[ab]/,,' $TMP
+  fi
+  
+  # Now find all the modules that were changed
+  TMP_MODULES=/tmp/tmp.modules.$$
+  for file in $(cut -f 1 $TMP | sort | uniq); do
+    echo $(findModule $file) >> $TMP_MODULES
+  done
+  rm $TMP
+  
+  # Filter out modules without code 
+  CHANGED_MODULES=""
+  for module in $(cat $TMP_MODULES | sort | uniq); do
+    $GREP "<packaging>pom</packaging>" $module/pom.xml > /dev/null
+    if [ "$?" != 0 ]; then
+      CHANGED_MODULES="$CHANGED_MODULES $module"
+    fi
+  done
+  rm $TMP_MODULES
+  echo $CHANGED_MODULES
+}
+###############################################################################
 ### Run the test-contrib target
 runContribTests () {
   echo ""

Modified: hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-project/pom.xml Fri May  4 20:22:57 2012
@@ -681,11 +681,6 @@
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
-          <artifactId>maven-assembly-plugin</artifactId>
-          <version>2.2.1</version>
-        </plugin>
-        <plugin>
-          <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-javadoc-plugin</artifactId>
           <version>2.8.1</version>
         </plugin>

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Fri May  4 20:22:57 2012
@@ -33,6 +33,7 @@ import org.apache.hadoop.tools.CopyListi
 import org.apache.hadoop.tools.mapred.CopyMapper;
 import org.apache.hadoop.tools.mapred.CopyOutputFormat;
 import org.apache.hadoop.tools.util.DistCpUtils;
+import org.apache.hadoop.util.ShutdownHookManager;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
 
@@ -49,6 +50,12 @@ import java.util.Random;
  * behaviour.
  */
 public class DistCp extends Configured implements Tool {
+
+  /**
+   * Priority of the shutdown hook.
+   */
+  public static final int SHUTDOWN_HOOK_PRIORITY = 30;
+
   private static final Log LOG = LogFactory.getLog(DistCp.class);
 
   private DistCpOptions inputOptions;
@@ -353,7 +360,8 @@ public class DistCp extends Configured i
       DistCp distCp = new DistCp();
       Cleanup CLEANUP = new Cleanup(distCp);
 
-      Runtime.getRuntime().addShutdownHook(CLEANUP);
+      ShutdownHookManager.get().addShutdownHook(CLEANUP,
+        SHUTDOWN_HOOK_PRIORITY);
       System.exit(ToolRunner.run(getDefaultConf(), distCp, argv));
     }
     catch (Exception e) {
@@ -388,7 +396,7 @@ public class DistCp extends Configured i
     return submitted;
   }
 
-  private static class Cleanup extends Thread {
+  private static class Cleanup implements Runnable {
     private final DistCp distCp;
 
     public Cleanup(DistCp distCp) {

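For reference, the registration pattern introduced above can be sketched on its own. This is a minimal, hypothetical example (the class and hook names are illustrative); only ShutdownHookManager.get().addShutdownHook(Runnable, int) and the priority value 30 come from the patch itself:

    import org.apache.hadoop.util.ShutdownHookManager;

    public class ShutdownHookSketch {
      // Same value the patch gives DistCp.SHUTDOWN_HOOK_PRIORITY.
      private static final int HOOK_PRIORITY = 30;

      public static void main(String[] args) {
        Runnable cleanup = new Runnable() {
          @Override
          public void run() {
            // Release resources here, as DistCp's Cleanup does for its running job.
            System.out.println("running prioritized shutdown hook");
          }
        };
        // ShutdownHookManager runs registered hooks in priority order from a
        // single managed JVM shutdown hook, rather than in the unspecified
        // order of plain Runtime.getRuntime().addShutdownHook() threads.
        ShutdownHookManager.get().addShutdownHook(cleanup, HOOK_PRIORITY);
      }
    }

Changing Cleanup from "extends Thread" to "implements Runnable" is what makes it acceptable to ShutdownHookManager, which takes a Runnable rather than a Thread.
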
Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-extras/src/main/java/org/apache/hadoop/mapred/tools/package-info.java Fri May  4 20:22:57 2012
@@ -17,7 +17,6 @@
  */
 
 /**
- * Command-line tools associated with the {@link org.apache.hadoop.mapred}
- * package.
+ * Command-line tools associated with MapReduce.
  */
 package org.apache.hadoop.mapred.tools;

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/CurrentJHParser.java Fri May  4 20:22:57 2012
@@ -26,9 +26,7 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.v2.hs.JobHistory;
 
 /**
- * {@link JobHistoryParser} that parses {@link JobHistory} files produced by
- * {@link org.apache.hadoop.mapreduce.jobhistory.JobHistory} in the same source
- * code tree as rumen.
+ * {@link JobHistoryParser} that parses JobHistory files.
  */
 public class CurrentJHParser implements JobHistoryParser {
   private EventReader reader;

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/LoggedTaskAttempt.java Fri May  4 20:22:57 2012
@@ -170,7 +170,7 @@ public class LoggedTaskAttempt implement
 
   /**
    *
-   * @returns a list of all splits vectors, ordered in enumeral order
+   * @return a list of all splits vectors, ordered in enumeral order
    *           within {@link SplitVectorKind} .  Do NOT use hard-coded
    *           indices within the return for this with a hard-coded
    *           index to get individual values; use

Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/datatypes/util/MapReduceJobPropertiesParser.java Fri May  4 20:22:57 2012
@@ -43,7 +43,7 @@ import org.apache.log4j.Logger;
  * across versions. {@link MapReduceJobPropertiesParser} is a utility class that
  * parses MapReduce job configuration properties and converts the value into a 
  * well defined {@link DataType}. Users can use the
- * {@link MapReduceJobPropertiesParser#parseJobProperty()} API to process job 
+ * {@link #parseJobProperty(String, String)} API to process job 
  * configuration parameters. This API will parse a job property represented as a
  * key-value pair and return the value wrapped inside a {@link DataType}. 
  * Callers can then use the returned {@link DataType} for further processing.
@@ -61,12 +61,12 @@ import org.apache.log4j.Logger;
  * {@link DefaultDataType}. Currently only '-Xmx' and '-Xms' settings are 
  * considered while the rest are ignored.
  * 
- * Note that the {@link MapReduceJobPropertiesParser#parseJobProperty()} API 
+ * Note that the {@link #parseJobProperty(String, String)} API 
  * maps the keys to a configuration parameter listed in 
  * {@link MRJobConfig}. This not only filters non-framework specific keys thus 
  * ignoring user-specific and hard-to-parse keys but also provides a consistent
  * view for all possible inputs. So if users invoke the 
- * {@link MapReduceJobPropertiesParser#parseJobProperty()} API with either
+ * {@link #parseJobProperty(String, String)} API with either
  * <"mapreduce.job.user.name", "bob"> or <"user.name", "bob">, then the result 
  * would be a {@link UserName} {@link DataType} wrapping the user-name "bob".
  */

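The corrected {@link} references above point at the parser's public entry point. Below is a small usage sketch; it assumes MapReduceJobPropertiesParser has a public no-argument constructor and that parseJobProperty(String, String) returns a rumen DataType exposing getValue(), neither of which is shown in this patch:

    import org.apache.hadoop.tools.rumen.datatypes.DataType;
    import org.apache.hadoop.tools.rumen.datatypes.util.MapReduceJobPropertiesParser;

    public class JobPropertySketch {
      public static void main(String[] args) {
        MapReduceJobPropertiesParser parser = new MapReduceJobPropertiesParser();  // assumed constructor
        // Per the javadoc, the framework key and the deprecated key map to the
        // same result: a UserName DataType wrapping "bob".
        DataType<?> byNewKey = parser.parseJobProperty("mapreduce.job.user.name", "bob");
        DataType<?> byOldKey = parser.parseJobProperty("user.name", "bob");
        System.out.println(byNewKey.getValue() + " / " + byOldKey.getValue());  // assumed accessor
      }
    }
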
Modified: hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java (original)
+++ hadoop/common/branches/HDFS-3092/hadoop-tools/hadoop-rumen/src/main/java/org/apache/hadoop/tools/rumen/package-info.java Fri May  4 20:22:57 2012
@@ -181,8 +181,8 @@
  *  <li>
  *    {@link org.apache.hadoop.tools.rumen.JobBuilder}<br>
  *      Summarizes a job history file.
- *      {@link org.apache.hadoop.tools.rumen.TraceBuilder} provides  
- *      {@link org.apache.hadoop.tools.rumen.TraceBuilder#extractJobID(String)} 
+ *      {@link org.apache.hadoop.tools.rumen.JobHistoryUtils} provides  
+ *      {@link org.apache.hadoop.tools.rumen.JobHistoryUtils#extractJobID(String)} 
  *      API for extracting job id from job history or job configuration files
  *      which can be used for instantiating {@link org.apache.hadoop.tools.rumen.JobBuilder}. 
  *      {@link org.apache.hadoop.tools.rumen.JobBuilder} generates a 

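The package documentation fix above points readers at JobHistoryUtils rather than TraceBuilder for job-id extraction. A hypothetical sketch of the flow it describes follows; it assumes extractJobID is a static helper returning the id as a String and that JobBuilder accepts that id in its constructor, which matches the javadoc wording but is not verified against this revision:

    import org.apache.hadoop.tools.rumen.JobBuilder;
    import org.apache.hadoop.tools.rumen.JobHistoryUtils;

    public class JobIdSketch {
      public static void main(String[] args) {
        // Illustrative file name only; the javadoc says job history or job
        // configuration file names can both be used.
        String historyFile = "job_201205041234_0001_conf.xml";
        String jobId = JobHistoryUtils.extractJobID(historyFile);  // assumed static
        // The extracted id seeds a JobBuilder, which then consumes history events.
        JobBuilder builder = new JobBuilder(jobId);
        System.out.println("building trace for " + jobId);
      }
    }
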
Modified: hadoop/common/branches/HDFS-3092/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3092/pom.xml?rev=1334158&r1=1334157&r2=1334158&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3092/pom.xml (original)
+++ hadoop/common/branches/HDFS-3092/pom.xml Fri May  4 20:22:57 2012
@@ -103,6 +103,11 @@
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-assembly-plugin</artifactId>
+          <version>2.3</version>
+        </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-deploy-plugin</artifactId>
           <version>2.5</version>
         </plugin>