Posted to common-commits@hadoop.apache.org by sz...@apache.org on 2012/10/19 04:27:37 UTC

svn commit: r1399950 - in /hadoop/common/branches/HDFS-2802: ./ dev-support/ dev-support/cmake-maven-ng-plugin/ dev-support/cmake-maven-ng-plugin/dev-support/ dev-support/cmake-maven-ng-plugin/src/ dev-support/cmake-maven-ng-plugin/src/main/ dev-suppor...

Author: szetszwo
Date: Fri Oct 19 02:25:55 2012
New Revision: 1399950

URL: http://svn.apache.org/viewvc?rev=1399950&view=rev
Log:
Merge r1360400 through r1399945 from trunk.

Added:
    hadoop/common/branches/HDFS-2802/.gitattributes
      - copied unchanged from r1399945, hadoop/common/trunk/.gitattributes
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/   (props changed)
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/dev-support/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/dev-support/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/dev-support/findbugsExcludeFile.xml
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/dev-support/findbugsExcludeFile.xml
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/pom.xml
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/pom.xml
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/
      - copied from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/CompileMojo.java
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/CompileMojo.java
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/TestMojo.java
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/TestMojo.java
    hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/Utils.java
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/cmake-maven-ng-plugin/src/main/java/org/apache/hadoop/cmake/maven/ng/Utils.java
    hadoop/common/branches/HDFS-2802/dev-support/pom.xml
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/pom.xml
    hadoop/common/branches/HDFS-2802/dev-support/relnotes.py
      - copied unchanged from r1399945, hadoop/common/trunk/dev-support/relnotes.py
    hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
      - copied unchanged from r1399945, hadoop/common/trunk/hadoop-assemblies/src/main/resources/assemblies/hadoop-yarn-dist.xml
Removed:
    hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-raid-dist.xml
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestDistCp.java
Modified:
    hadoop/common/branches/HDFS-2802/   (props changed)
    hadoop/common/branches/HDFS-2802/BUILDING.txt
    hadoop/common/branches/HDFS-2802/dev-support/   (props changed)
    hadoop/common/branches/HDFS-2802/dev-support/smart-apply-patch.sh
    hadoop/common/branches/HDFS-2802/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
    hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
    hadoop/common/branches/HDFS-2802/hadoop-client/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-project/src/site/site.xml
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-datajoin/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-gridmix/   (props changed)
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/pom.xml
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
    hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
    hadoop/common/branches/HDFS-2802/pom.xml

Propchange: hadoop/common/branches/HDFS-2802/
------------------------------------------------------------------------------
  Merged /hadoop/common/branches/HDFS-3077:r1363593-1396941
  Merged /hadoop/common/trunk:r1360400-1399945

Modified: hadoop/common/branches/HDFS-2802/BUILDING.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/BUILDING.txt?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/BUILDING.txt (original)
+++ hadoop/common/branches/HDFS-2802/BUILDING.txt Fri Oct 19 02:25:55 2012
@@ -54,12 +54,32 @@ Maven build goals:
  Build options:
 
   * Use -Pnative to compile/bundle native code
-  * Use -Dsnappy.prefix=(/usr/local) & -Dbundle.snappy=(false) to compile
-    Snappy JNI bindings and to bundle Snappy SO files
   * Use -Pdocs to generate & bundle the documentation in the distribution (using -Pdist)
   * Use -Psrc to create a project source TAR.GZ
   * Use -Dtar to create a TAR with the distribution (using -Pdist)
 
+ Snappy build options:
+
+   Snappy is a compression library that can be utilized by the native code.
+   It is currently an optional component, meaning that Hadoop can be built with
+   or without this dependency.
+
+  * Use -Drequire.snappy to fail the build if libsnappy.so is not found.
+    If this option is not specified and the snappy library is missing,
+    we silently build a version of libhadoop.so that cannot make use of snappy.
+    This option is recommended if you plan on making use of snappy and want
+    to get more repeatable builds.
+
+  * Use -Dsnappy.prefix to specify a nonstandard location for the libsnappy
+    header files and library files. You do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dsnappy.lib to specify a nonstandard location for the libsnappy library
+    files.  Similarly to snappy.prefix, you do not need this option if you have
+    installed snappy using a package manager.
+  * Use -Dbundle.snappy to copy the contents of the snappy.lib directory into
+    the final tar file. This option requires that -Dsnappy.lib is also given,
+    and it ignores the -Dsnappy.prefix option.
+
    Tests options:
 
   * Use -DskipTests to skip tests when running the following Maven goals:
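
For illustration only (these commands are not part of the commit), the new
Snappy flags documented in the BUILDING.txt hunk above combine with the
existing dist options roughly like so; the install paths are hypothetical:

  # fail the build up front if libsnappy.so cannot be found, instead of
  # silently producing a libhadoop.so without snappy support:
  $ mvn package -Pdist -Pnative -Dtar -Drequire.snappy -DskipTests

  # build against a snappy installed outside the default search path and
  # bundle its libraries into the tar (-Dbundle.snappy requires that
  # -Dsnappy.lib is also given):
  $ mvn package -Pdist -Pnative -Dtar -Drequire.snappy \
      -Dsnappy.prefix=/opt/snappy -Dsnappy.lib=/opt/snappy/lib \
      -Dbundle.snappy -DskipTests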

Propchange: hadoop/common/branches/HDFS-2802/dev-support/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Oct 19 02:25:55 2012
@@ -0,0 +1 @@
+target

Propchange: hadoop/common/branches/HDFS-2802/dev-support/cmake-maven-ng-plugin/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Fri Oct 19 02:25:55 2012
@@ -0,0 +1 @@
+target

Modified: hadoop/common/branches/HDFS-2802/dev-support/smart-apply-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/dev-support/smart-apply-patch.sh?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/dev-support/smart-apply-patch.sh (original)
+++ hadoop/common/branches/HDFS-2802/dev-support/smart-apply-patch.sh Fri Oct 19 02:25:55 2012
@@ -79,15 +79,15 @@ if $PATCH -p0 -E --dry-run < $PATCH_FILE
     if [[ -d hadoop-common-project ]]; then
       echo Looks like this is being run at project root
 
-    # if all of the lines start with hadoop-common/, hadoop-hdfs/, or hadoop-mapreduce/, this is
+    # if all of the lines start with hadoop-common/, hadoop-hdfs/, hadoop-yarn/ or hadoop-mapreduce/, this is
     # relative to the hadoop root instead of the subproject root, so we need
     # to chop off another layer
-    elif [[ "$PREFIX_DIRS_AND_FILES" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
+    elif [[ "$PREFIX_DIRS_AND_FILES" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-yarn-project|hadoop-mapreduce-project)$ ]]; then
 
       echo Looks like this is relative to project root. Increasing PLEVEL
       PLEVEL=$[$PLEVEL + 1]
 
-    elif ! echo "$PREFIX_DIRS_AND_FILES" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
+    elif ! echo "$PREFIX_DIRS_AND_FILES" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-yarn-project\|hadoop-mapreduce-project' ; then
       echo Looks like this is a cross-subproject patch. Try applying from the project root
       cleanup 1
     fi
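
A hypothetical example of the prefix handling this change extends: a patch
generated from the hadoop source root whose paths all start with the newly
recognized hadoop-yarn-project prefix, e.g.

  # --- hadoop-yarn-project/pom.xml
  # +++ hadoop-yarn-project/pom.xml

now matches the elif above, so the script increases PLEVEL by one and strips
one more leading path component when applying the patch, rather than treating
the yarn subproject as an unknown prefix.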

Modified: hadoop/common/branches/HDFS-2802/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/dev-support/test-patch.sh?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-2802/dev-support/test-patch.sh Fri Oct 19 02:25:55 2012
@@ -32,7 +32,7 @@ JENKINS=false
 PATCH_DIR=/tmp
 SUPPORT_DIR=/tmp
 BASEDIR=$(pwd)
-
+BUILD_NATIVE=true
 PS=${PS:-ps}
 AWK=${AWK:-awk}
 WGET=${WGET:-wget}
@@ -67,6 +67,7 @@ printUsage() {
   echo "--forrest-home=<path>  Forrest home directory (default FORREST_HOME environment variable)"
   echo "--dirty-workspace      Allow the local SVN workspace to have uncommitted changes"
   echo "--run-tests            Run all tests below the base directory"
+  echo "--build-native=<bool>  If true, then build native components (default 'true')"
   echo
   echo "Jenkins-only options:"
   echo "--jenkins              Run by Jenkins (runs tests and posts results to JIRA)"
@@ -139,11 +140,18 @@ parseArgs() {
     --run-tests)
       RUN_TESTS=true
       ;;
+    --build-native=*)
+      BUILD_NATIVE=${i#*=}
+      ;;
     *)
       PATCH_OR_DEFECT=$i
       ;;
     esac
   done
+  if [[ $BUILD_NATIVE == "true" ]] ; then
+    NATIVE_PROFILE=-Pnative
+    REQUIRE_TEST_LIB_HADOOP=-Drequire.test.libhadoop
+  fi
   if [ -z "$PATCH_OR_DEFECT" ]; then
     printUsage
     exit 1
@@ -250,7 +258,7 @@ verifyPatch () {
     echo "PATCH APPLICATION FAILED"
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 patch.  The patch command could not apply the patch."
+    {color:red}-1 patch{color}.  The patch command could not apply the patch."
     return 1
   else
     return 0
@@ -305,12 +313,12 @@ checkAuthor () {
   if [[ $authorTags != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 @author.  The patch appears to contain $authorTags @author tags which the Hadoop community has agreed to not allow in code contributions."
+    {color:red}-1 @author{color}.  The patch appears to contain $authorTags @author tags which the Hadoop community has agreed to not allow in code contributions."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 @author.  The patch does not contain any @author tags."
+    {color:green}+1 @author{color}.  The patch does not contain any @author tags."
   return 0
 }
 
@@ -335,20 +343,20 @@ checkTests () {
         echo "The patch appears to be a documentation patch that doesn't require tests."
         JIRA_COMMENT="$JIRA_COMMENT
 
-    +0 tests included.  The patch appears to be a documentation patch that doesn't require tests."
+    {color:green}+0 tests included{color}.  The patch appears to be a documentation patch that doesn't require tests."
         return 0
       fi
     fi
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 tests included.  The patch doesn't appear to include any new or modified tests.
+    {color:red}-1 tests included{color}.  The patch doesn't appear to include any new or modified tests.
                         Please justify why no new tests are needed for this patch.
                         Also please list what manual steps were performed to verify this patch."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 tests included.  The patch appears to include $testReferences new or modified test files."
+    {color:green}+1 tests included{color}.  The patch appears to include $testReferences new or modified test files."
   return 0
 }
 
@@ -379,7 +387,7 @@ applyPatch () {
     echo "PATCH APPLICATION FAILED"
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 patch.  The patch command could not apply the patch."
+    {color:red}-1 patch{color}.  The patch command could not apply the patch."
     return 1
   fi
   return 0
@@ -410,18 +418,19 @@ checkJavadocWarnings () {
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
 
-  #There are 6 warnings that are caused by things that are caused by using sun internal APIs.
-  OK_JAVADOC_WARNINGS=6;
+  # There are 14 warnings that are caused by things that are caused by using sun
+  # internal APIs, and using Maven plugin annotations in comments.
+  OK_JAVADOC_WARNINGS=14;
   ### if current warnings greater than OK_JAVADOC_WARNINGS
   if [[ $javadocWarnings -ne $OK_JAVADOC_WARNINGS ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 javadoc.  The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
+    {color:red}-1 javadoc{color}.  The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 javadoc.  The javadoc tool did not generate any warning messages."
+    {color:green}+1 javadoc{color}.  The javadoc tool did not generate any warning messages."
   return 0
 }
 
@@ -437,12 +446,12 @@ checkJavacWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Pnative -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
-  $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Pnative -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess $NATIVE_PROFILE -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+  $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess $NATIVE_PROFILE -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 javac.  The patch appears to cause the build to fail."
+    {color:red}-1 javac{color}.  The patch appears to cause the build to fail."
     return 2
   fi
   ### Compare trunk and patch javac warning numbers
@@ -456,7 +465,7 @@ checkJavacWarnings () {
       if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
         JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 javac.  The applied patch generated $patchJavacWarnings javac compiler warnings (more than the trunk's current $trunkJavacWarnings warnings)."
+      {color:red}-1 javac{color}.  The applied patch generated $patchJavacWarnings javac compiler warnings (more than the trunk's current $trunkJavacWarnings warnings)."
 
     $DIFF $PATCH_DIR/filteredTrunkJavacWarnings.txt $PATCH_DIR/filteredPatchJavacWarnings.txt > $PATCH_DIR/diffJavacWarnings.txt 
         JIRA_COMMENT_FOOTER="Javac warnings: $BUILD_URL/artifact/trunk/patchprocess/diffJavacWarnings.txt
@@ -468,7 +477,7 @@ $JIRA_COMMENT_FOOTER"
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 javac.  The applied patch does not increase the total number of javac compiler warnings."
+    {color:green}+1 javac{color}.  The applied patch does not increase the total number of javac compiler warnings."
   return 0
 }
 
@@ -498,7 +507,7 @@ checkReleaseAuditWarnings () {
       if [[ $patchReleaseAuditWarnings -gt 0 ]] ; then
         JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 release audit.  The applied patch generated $patchReleaseAuditWarnings release audit warnings."
+        {color:red}-1 release audit{color}.  The applied patch generated $patchReleaseAuditWarnings release audit warnings."
         $GREP '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt > $PATCH_DIR/patchReleaseAuditProblems.txt
         echo "Lines that start with ????? in the release audit report indicate files that do not have an Apache license header." >> $PATCH_DIR/patchReleaseAuditProblems.txt
         JIRA_COMMENT_FOOTER="Release audit warnings: $BUILD_URL/artifact/trunk/patchprocess/patchReleaseAuditProblems.txt
@@ -509,7 +518,7 @@ $JIRA_COMMENT_FOOTER"
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 release audit.  The applied patch does not increase the total number of release audit warnings."
+    {color:green}+1 release audit{color}.  The applied patch does not increase the total number of release audit warnings."
   return 0
 }
 
@@ -538,12 +547,12 @@ $JIRA_COMMENT_FOOTER"
 #  if [[ $patchStyleErrors != 0 ]] ; then
 #    JIRA_COMMENT="$JIRA_COMMENT
 #
-#    -1 checkstyle.  The patch generated $patchStyleErrors code style errors."
+#    {color:red}-1 checkstyle{color}.  The patch generated $patchStyleErrors code style errors."
 #    return 1
 #  fi
 #  JIRA_COMMENT="$JIRA_COMMENT
 #
-#    +1 checkstyle.  The patch generated 0 code style errors."
+#    {color:green}+1 checkstyle{color}.  The patch generated 0 code style errors."
   return 0
 }
 
@@ -595,7 +604,7 @@ checkFindbugsWarnings () {
   if [ $rc != 0 ] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 findbugs.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
+    {color:red}-1 findbugs{color}.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
     return 1
   fi
     
@@ -628,12 +637,12 @@ $JIRA_COMMENT_FOOTER"
   if [[ $findbugsWarnings -gt 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 findbugs.  The patch appears to introduce $findbugsWarnings new Findbugs (version ${findbugs_version}) warnings."
+    {color:red}-1 findbugs{color}.  The patch appears to introduce $findbugsWarnings new Findbugs (version ${findbugs_version}) warnings."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 findbugs.  The patch does not introduce any new Findbugs (version ${findbugs_version}) warnings."
+    {color:green}+1 findbugs{color}.  The patch does not introduce any new Findbugs (version ${findbugs_version}) warnings."
   return 0
 }
 
@@ -655,12 +664,12 @@ checkEclipseGeneration () {
   if [[ $? != 0 ]] ; then
       JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 eclipse:eclipse.  The patch failed to build with eclipse:eclipse."
+    {color:red}-1 eclipse:eclipse{color}.  The patch failed to build with eclipse:eclipse."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 eclipse:eclipse.  The patch built with eclipse:eclipse."
+    {color:green}+1 eclipse:eclipse{color}.  The patch built with eclipse:eclipse."
   return 0
 }
 
@@ -681,12 +690,46 @@ runTests () {
 
   failed_tests=""
   modules=$(findModules)
-  for module in $modules;
-  do
+  #
+  # If we are building hadoop-hdfs-project, we must build the native component
+  # of hadoop-common-project first.  In order to accomplish this, we move the
+  # hadoop-hdfs subprojects to the end of the list so that common will come
+  # first.
+  #
+  # Of course, we may not be building hadoop-common at all-- in this case, we
+  # explicitly insert a mvn compile -Pnative of common, to ensure that the
+  # native libraries show up where we need them.
+  #
+  building_common=0
+  for module in $modules; do
+      if [[ $module == hadoop-hdfs-project* ]]; then
+          hdfs_modules="$hdfs_modules $module"
+      elif [[ $module == hadoop-common-project* ]]; then
+          ordered_modules="$ordered_modules $module"
+          building_common=1
+      else
+          ordered_modules="$ordered_modules $module"
+      fi
+  done
+  if [ -n $hdfs_modules ]; then
+      ordered_modules="$ordered_modules $hdfs_modules"
+      if [[ $building_common -eq 0 ]]; then
+          echo "  Building hadoop-common with -Pnative in order to provide \
+libhadoop.so to the hadoop-hdfs unit tests."
+          echo "  $MVN compile $NATIVE_PROFILE -D${PROJECT_NAME}PatchProcess"
+          if ! $MVN compile $NATIVE_PROFILE -D${PROJECT_NAME}PatchProcess; then
+              JIRA_COMMENT="$JIRA_COMMENT
+        {color:red}-1 core tests{color}.  Failed to build the native portion \
+of hadoop-common prior to running the unit tests in $ordered_modules"
+              return 1
+          fi
+      fi
+  fi
+  for module in $ordered_modules; do
     cd $module
     echo "  Running tests in $module"
-    echo "  $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess"
-    $MVN clean install -fn -Pnative -D${PROJECT_NAME}PatchProcess
+    echo "  $MVN clean install -fn $NATIVE_PROFILE $REQUIRE_TEST_LIB_HADOOP -D${PROJECT_NAME}PatchProcess"
+    $MVN clean install -fn $NATIVE_PROFILE $REQUIRE_TEST_LIB_HADOOP -D${PROJECT_NAME}PatchProcess
     module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
     # With -fn mvn always exits with a 0 exit code.  Because of this we need to
     # find the errors instead of using the exit code.  We assume that if the build
@@ -700,13 +743,13 @@ ${module_failed_tests}"
   if [[ -n "$failed_tests" ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 core tests.  The patch failed these unit tests in $modules:
+    {color:red}-1 core tests{color}.  The patch failed these unit tests in $modules:
 $failed_tests"
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 core tests.  The patch passed unit tests in $modules."
+    {color:green}+1 core tests{color}.  The patch passed unit tests in $modules."
   return 0
 }
 
@@ -729,7 +772,7 @@ findModule (){
 findModules () {
   # Come up with a list of changed files into $TMP
   TMP=/tmp/tmp.paths.$$
-  $GREP '^+++\|^---' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
+  $GREP '^+++ \|^--- ' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
   
   # if all of the lines start with a/ or b/, then this is a git patch that
   # was generated without --no-prefix
@@ -782,12 +825,12 @@ runContribTests () {
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 contrib tests.  The patch failed contrib unit tests."
+    {color:red}-1 contrib tests{color}.  The patch failed contrib unit tests."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 contrib tests.  The patch passed contrib unit tests."
+    {color:green}+1 contrib tests{color}.  The patch passed contrib unit tests."
   return 0
 }
 
@@ -814,12 +857,12 @@ checkInjectSystemFaults () {
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
-    -1 system test framework.  The patch failed system test framework compile."
+    {color:red}-1 system test framework{color}.  The patch failed system test framework compile."
     return 1
   fi
   JIRA_COMMENT="$JIRA_COMMENT
 
-    +1 system test framework.  The patch passed system test framework compile."
+    {color:green}+1 system test framework{color}.  The patch passed system test framework compile."
   return 0
 }
 
@@ -832,11 +875,11 @@ submitJiraComment () {
     JIRA_COMMENT_FOOTER=""
   fi
   if [[ $result == 0 ]] ; then
-    comment="+1 overall.  $JIRA_COMMENT
+    comment="{color:green}+1 overall{color}.  $JIRA_COMMENT
 
 $JIRA_COMMENT_FOOTER"
   else
-    comment="-1 overall.  $JIRA_COMMENT
+    comment="{color:red}-1 overall{color}.  $JIRA_COMMENT
 
 $JIRA_COMMENT_FOOTER"
   fi
@@ -914,6 +957,7 @@ if [[ $RESULT != 0 ]] ; then
 fi
 buildWithPatch
 checkAuthor
+(( RESULT = RESULT + $? ))
 
 if [[ $JENKINS == "true" ]] ; then
   cleanUpXml
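
For illustration only (not part of the commit; the patch file name is
hypothetical), the new flag added above would be used like:

  # run the precommit checks without building the native components:
  $ dev-support/test-patch.sh --dirty-workspace --build-native=false \
      HDFS-2802.patch

When --build-native is left at its default of true, parseArgs sets
NATIVE_PROFILE=-Pnative and REQUIRE_TEST_LIB_HADOOP=-Drequire.test.libhadoop,
so the Maven invocations in checkJavacWarnings and runTests behave as they
did before this change.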

Modified: hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Fri Oct 19 02:25:55 2012
@@ -111,9 +111,9 @@
       <outputDirectory>/share/doc/hadoop/${hadoop.component}</outputDirectory>
     </fileSet>
     <fileSet>
-      <directory>${basedir}/src/main/native</directory>
+      <directory>${basedir}/src/main/native/libhdfs</directory>
       <includes>
-        <include>*.h</include>
+        <include>hdfs.h</include>
       </includes>
       <outputDirectory>/include</outputDirectory>
     </fileSet>

Modified: hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-assemblies/src/main/resources/assemblies/hadoop-mapreduce-dist.xml Fri Oct 19 02:25:55 2012
@@ -24,19 +24,6 @@
   <includeBaseDirectory>false</includeBaseDirectory>
   <fileSets>
     <fileSet>
-      <directory>hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/target/native/target/usr/local/bin</directory>
-      <outputDirectory>bin</outputDirectory>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
-      <directory>hadoop-yarn/bin</directory>
-      <outputDirectory>bin</outputDirectory>
-      <includes>
-        <include>yarn</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
       <directory>bin</directory>
       <outputDirectory>bin</outputDirectory>
       <includes>
@@ -53,25 +40,6 @@
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
-      <directory>hadoop-yarn/bin</directory>
-      <outputDirectory>libexec</outputDirectory>
-      <includes>
-        <include>yarn-config.sh</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
-      <directory>hadoop-yarn/bin</directory>
-      <outputDirectory>sbin</outputDirectory>
-      <includes>
-        <include>yarn-daemon.sh</include>
-        <include>yarn-daemons.sh</include>
-        <include>start-yarn.sh</include>
-        <include>stop-yarn.sh</include>
-      </includes>
-      <fileMode>0755</fileMode>
-    </fileSet>
-    <fileSet>
       <directory>bin</directory>
       <outputDirectory>sbin</outputDirectory>
       <includes>
@@ -80,7 +48,7 @@
       <fileMode>0755</fileMode>
     </fileSet>
     <fileSet>
-      <directory>hadoop-yarn/conf</directory>
+      <directory>conf</directory>
       <outputDirectory>etc/hadoop</outputDirectory>
       <includes>
         <include>**/*</include>
@@ -126,9 +94,6 @@
   </fileSets>
   <moduleSets>
     <moduleSet>
-      <excludes>
-        <exclude>org.apache.hadoop:hadoop-yarn-server-tests</exclude>
-      </excludes>
       <binaries>
         <outputDirectory>share/hadoop/${hadoop.component}</outputDirectory>
         <includeDependencies>false</includeDependencies>
@@ -138,6 +103,7 @@
     <moduleSet>
       <includes>
         <include>org.apache.hadoop:hadoop-mapreduce-client-jobclient</include>
+        <include>org.apache.hadoop:hadoop-yarn-server-tests</include>
       </includes>
       <binaries>
         <attachmentClassifier>tests</attachmentClassifier>
@@ -155,10 +121,18 @@
       <excludes>
         <exclude>org.apache.hadoop:hadoop-common</exclude>
         <exclude>org.apache.hadoop:hadoop-hdfs</exclude>
-	<!-- use slf4j from common to avoid multiple binding warnings -->
-	<exclude>org.slf4j:slf4j-api</exclude>
-	<exclude>org.slf4j:slf4j-log4j12</exclude>
+        <!-- use slf4j from common to avoid multiple binding warnings -->
+        <exclude>org.slf4j:slf4j-api</exclude>
+        <exclude>org.slf4j:slf4j-log4j12</exclude>
+        <exclude>org.hsqldb:hsqldb</exclude>
       </excludes>
     </dependencySet>
+    <dependencySet>
+      <useProjectArtifact>false</useProjectArtifact>
+      <outputDirectory>/share/hadoop/${hadoop.component}/lib-examples</outputDirectory>
+      <includes>
+        <include>org.hsqldb:hsqldb</include>
+      </includes>
+    </dependencySet>
   </dependencySets>
 </assembly>

Modified: hadoop/common/branches/HDFS-2802/hadoop-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-client/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-client/pom.xml Fri Oct 19 02:25:55 2012
@@ -40,10 +40,6 @@
       <scope>compile</scope>
       <exclusions>
         <exclusion>
-          <groupId>commons-cli</groupId>
-          <artifactId>commons-cli</artifactId>
-        </exclusion>
-        <exclusion>
           <groupId>commons-httpclient</groupId>
           <artifactId>commons-httpclient</artifactId>
         </exclusion>

Modified: hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-dist/pom.xml Fri Oct 19 02:25:55 2012
@@ -52,11 +52,6 @@
       <artifactId>hadoop-yarn-api</artifactId>
       <scope>provided</scope>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-hdfs-raid</artifactId>
-      <scope>provided</scope>
-    </dependency>
   </dependencies>
 
   <build>
@@ -125,7 +120,7 @@
                       run cp -r $ROOT/hadoop-common-project/hadoop-common/target/hadoop-common-${project.version}/* .
                       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs/target/hadoop-hdfs-${project.version}/* .
                       run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-httpfs/target/hadoop-hdfs-httpfs-${project.version}/* .
-                      run cp -r $ROOT/hadoop-hdfs-project/hadoop-hdfs-raid/target/hadoop-hdfs-raid-${project.version}/* .
+                      run cp -r $ROOT/hadoop-yarn-project/target/hadoop-yarn-project-${project.version}/* .
                       run cp -r $ROOT/hadoop-mapreduce-project/target/hadoop-mapreduce-${project.version}/* .
                       run cp -r $ROOT/hadoop-tools/hadoop-tools-dist/target/hadoop-tools-dist-${project.version}/* .
                       echo

Modified: hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-project/pom.xml Fri Oct 19 02:25:55 2012
@@ -120,6 +120,12 @@
 
       <dependency>
         <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-client</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
+      <dependency>
+        <groupId>org.apache.hadoop</groupId>
         <artifactId>hadoop-mapreduce-client-core</artifactId>
         <version>${project.version}</version>
       </dependency>
@@ -197,6 +203,12 @@
         <type>test-jar</type>
       </dependency>
 
+     <dependency>
+        <groupId>org.apache.hadoop</groupId>
+        <artifactId>hadoop-yarn-applications-distributedshell</artifactId>
+        <version>${project.version}</version>
+      </dependency>
+
       <dependency>
         <groupId>org.apache.hadoop</groupId>
          <artifactId>hadoop-mapreduce-client-jobclient</artifactId>
@@ -257,11 +269,6 @@
         <artifactId>hadoop-client</artifactId>
         <version>${project.version}</version>
       </dependency>
-      <dependency>
-        <groupId>org.apache.hadoop</groupId>
-        <artifactId>hadoop-hdfs-raid</artifactId>
-        <version>${project.version}</version>
-      </dependency>
 
       <dependency>
         <groupId>org.apache.hadoop</groupId>
@@ -457,7 +464,7 @@
       <dependency>
         <groupId>log4j</groupId>
         <artifactId>log4j</artifactId>
-        <version>1.2.15</version>
+        <version>1.2.17</version>
         <exclusions>
           <exclusion>
             <groupId>com.sun.jdmk</groupId>
@@ -648,7 +655,11 @@
         <version>4.0.0</version>
         <scope>compile</scope>
       </dependency>
-
+      <dependency>
+        <groupId>org.hsqldb</groupId>
+        <artifactId>hsqldb</artifactId>
+        <version>2.0.0</version>
+      </dependency>
     </dependencies>
   </dependencyManagement>
 
@@ -681,7 +692,7 @@
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-surefire-plugin</artifactId>
-          <version>2.12</version>
+          <version>2.12.3</version>
         </plugin>
         <plugin>
           <groupId>org.apache.maven.plugins</groupId>
@@ -689,6 +700,11 @@
           <version>2.3.1</version>
         </plugin>
         <plugin>
+          <groupId>org.apache.hadoop.cmake.maven.ng</groupId>
+          <artifactId>cmake-ng</artifactId>
+          <version>3.0.0-SNAPSHOT</version>
+        </plugin>
+        <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-jar-plugin</artifactId>
           <version>2.3.1</version>
@@ -714,11 +730,6 @@
           <version>2.3.2</version>
         </plugin>
         <plugin>
-          <groupId>com.atlassian.maven.plugins</groupId>
-          <artifactId>maven-clover2-plugin</artifactId>
-          <version>3.0.5</version>
-        </plugin>
-        <plugin>
           <groupId>org.apache.maven.plugins</groupId>
           <artifactId>maven-checkstyle-plugin</artifactId>
           <version>2.6</version>
@@ -817,9 +828,9 @@
         <configuration>
           <forkMode>always</forkMode>
           <forkedProcessTimeoutInSeconds>900</forkedProcessTimeoutInSeconds>
-          <argLine>-Xmx1024m</argLine>
+          <argLine>-Xmx1024m -XX:+HeapDumpOnOutOfMemoryError</argLine>
           <environmentVariables>
-            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib</LD_LIBRARY_PATH>
+            <LD_LIBRARY_PATH>${env.LD_LIBRARY_PATH}:${project.build.directory}/native/target/usr/local/lib:${basedir}/../../hadoop-common-project/hadoop-common/target/native/target/usr/local/lib/</LD_LIBRARY_PATH>
             <MALLOC_ARENA_MAX>4</MALLOC_ARENA_MAX>
           </environmentVariables>
           <systemPropertyVariables>
@@ -837,6 +848,7 @@
             <java.net.preferIPv4Stack>true</java.net.preferIPv4Stack>
             <java.security.krb5.conf>${basedir}/src/test/resources/krb5.conf</java.security.krb5.conf>
             <java.security.egd>file:///dev/urandom</java.security.egd>
+            <require.test.libhadoop>${require.test.libhadoop}</require.test.libhadoop>
           </systemPropertyVariables>
           <includes>
             <include>**/Test*.java</include>
@@ -891,54 +903,6 @@
         <build.platform>Mac_OS_X-${sun.arch.data.model}</build.platform>
       </properties>
     </profile>
-
-    <profile>
-      <id>clover</id>
-      <activation>
-        <activeByDefault>false</activeByDefault>
-        <property>
-          <name>clover</name>
-        </property>
-      </activation>
-      <properties>
-        <cloverLicenseLocation>${user.home}/.clover.license</cloverLicenseLocation>
-        <cloverDatabase>${project.build.directory}/clover/hadoop-coverage.db</cloverDatabase>
-      </properties>
-      <build>
-        <plugins>
-          <plugin>
-            <groupId>com.atlassian.maven.plugins</groupId>
-            <artifactId>maven-clover2-plugin</artifactId>
-            <configuration>
-              <includesAllSourceRoots>true</includesAllSourceRoots>
-              <includesTestSourceRoots>true</includesTestSourceRoots>
-              <licenseLocation>${cloverLicenseLocation}</licenseLocation>
-              <cloverDatabase>${cloverDatabase}</cloverDatabase>
-              <targetPercentage>50%</targetPercentage>
-              <outputDirectory>${project.build.directory}/clover</outputDirectory>
-              <generateHtml>true</generateHtml>
-              <generateXml>true</generateXml>
-            </configuration>
-            <executions>
-              <execution>
-                <id>clover-setup</id>
-                <phase>process-sources</phase>
-                <goals>
-                  <goal>setup</goal>
-                </goals>
-              </execution>
-              <execution>
-                <id>clover</id>
-                <phase>test</phase>
-                <goals>
-                  <goal>clover</goal>
-                </goals>
-              </execution>
-            </executions>
-          </plugin>
-        </plugins>
-      </build>
-    </profile>
     <profile>
       <id>test-patch</id>
       <activation>

Modified: hadoop/common/branches/HDFS-2802/hadoop-project/src/site/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-project/src/site/site.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-project/src/site/site.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-project/src/site/site.xml Fri Oct 19 02:25:55 2012
@@ -54,7 +54,8 @@
     </menu>
     
     <menu name="HDFS" inherit="top">
-      <item name="High Availability" href="hadoop-yarn/hadoop-yarn-site/HDFSHighAvailability.html"/>
+      <item name="High Availability With QJM" href="hadoop-yarn/hadoop-yarn-site/HDFSHighAvailabilityWithQJM.html"/>
+      <item name="High Availability With NFS" href="hadoop-yarn/hadoop-yarn-site/HDFSHighAvailabilityWithNFS.html"/>
       <item name="Federation" href="hadoop-yarn/hadoop-yarn-site/Federation.html"/>
       <item name="WebHDFS REST API" href="hadoop-yarn/hadoop-yarn-site/WebHDFS.html"/>
       <item name="HttpFS Gateway" href="hadoop-hdfs-httpfs/index.html"/>
@@ -66,6 +67,7 @@
       <item name="Writing Yarn Applications" href="hadoop-yarn/hadoop-yarn-site/WritingYarnApplications.html"/>
       <item name="Capacity Scheduler" href="hadoop-yarn/hadoop-yarn-site/CapacityScheduler.html"/>
       <item name="Web Application Proxy" href="hadoop-yarn/hadoop-yarn-site/WebApplicationProxy.html"/>
+      <item name="Encrypted Shuffle" href="hadoop-yarn/hadoop-yarn-site/EncryptedShuffle.html"/>
       <item name="Yarn Commands" href="hadoop-yarn/hadoop-yarn-site/YarnCommands.html"/>
     </menu>
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-archives/src/test/java/org/apache/hadoop/tools/TestHadoopArchives.java Fri Oct 19 02:25:55 2012
@@ -52,11 +52,11 @@ public class TestHadoopArchives extends 
 
   {
     ((Log4JLogger)LogFactory.getLog(org.apache.hadoop.security.Groups.class)
-        ).getLogger().setLevel(Level.OFF);
+        ).getLogger().setLevel(Level.ERROR);
     ((Log4JLogger)org.apache.hadoop.ipc.Server.LOG
-        ).getLogger().setLevel(Level.OFF);
+        ).getLogger().setLevel(Level.ERROR);
     ((Log4JLogger)org.apache.hadoop.util.AsyncDiskService.LOG
-        ).getLogger().setLevel(Level.OFF);
+        ).getLogger().setLevel(Level.ERROR);
   }
 
   private static final String inputDir = "input";

Propchange: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-datajoin/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Fri Oct 19 02:25:55 2012
@@ -1 +1,4 @@
 target
+.classpath
+.project
+.settings

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/DistCp.java Fri Oct 19 02:25:55 2012
@@ -98,6 +98,11 @@ public class DistCp extends Configured i
    * @return On success, it returns 0. Else, -1.
    */
   public int run(String[] argv) {
+    if (argv.length < 1) {
+      OptionsParser.usage();
+      return DistCpConstants.INVALID_ARGUMENT;
+    }
+    
     try {
       inputOptions = (OptionsParser.parse(argv));
 
@@ -359,18 +364,20 @@ public class DistCp extends Configured i
    * @param argv Command-line arguments sent to DistCp.
    */
   public static void main(String argv[]) {
+    int exitCode;
     try {
       DistCp distCp = new DistCp();
       Cleanup CLEANUP = new Cleanup(distCp);
 
       ShutdownHookManager.get().addShutdownHook(CLEANUP,
         SHUTDOWN_HOOK_PRIORITY);
-      System.exit(ToolRunner.run(getDefaultConf(), distCp, argv));
+      exitCode = ToolRunner.run(getDefaultConf(), distCp, argv);
     }
     catch (Exception e) {
       LOG.error("Couldn't complete DistCp operation: ", e);
-      System.exit(DistCpConstants.UNKNOWN_ERROR);
+      exitCode = DistCpConstants.UNKNOWN_ERROR;
     }
+    System.exit(exitCode);
   }
 
   /**
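
For illustration only (not part of the commit), the combined effect of the
two DistCp.java changes above:

  # invoking DistCp with no arguments:
  $ hadoop distcp
  # run() now prints the OptionsParser usage text and returns
  # DistCpConstants.INVALID_ARGUMENT instead of handing an empty argument
  # list to the parser; main() now reaches System.exit() through a single
  # call site, after the try/catch completes.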

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/OptionsParser.java Fri Oct 19 02:25:55 2012
@@ -156,6 +156,10 @@ public class OptionsParser {
       try {
         Integer mapBandwidth = Integer.parseInt(
             getVal(command, DistCpOptionSwitch.BANDWIDTH.getSwitch()).trim());
+        if (mapBandwidth.intValue() <= 0) {
+          throw new IllegalArgumentException("Bandwidth specified is not positive: " +
+              mapBandwidth);
+        }
         option.setMapBandwidth(mapBandwidth);
       } catch (NumberFormatException e) {
         throw new IllegalArgumentException("Bandwidth specified is invalid: " +

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/CopyMapper.java Fri Oct 19 02:25:55 2012
@@ -255,7 +255,7 @@ public class CopyMapper extends Mapper<T
 
     long bytesCopied;
     try {
-      bytesCopied = (Long)new RetriableFileCopyCommand(description)
+      bytesCopied = (Long)new RetriableFileCopyCommand(skipCrc, description)
                        .execute(sourceFileStatus, target, context, fileAttributes);
     } catch (Exception e) {
       context.setStatus("Copy Failure: " + sourceFileStatus.getPath());

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/main/java/org/apache/hadoop/tools/mapred/RetriableFileCopyCommand.java Fri Oct 19 02:25:55 2012
@@ -41,7 +41,8 @@ public class RetriableFileCopyCommand ex
 
   private static Log LOG = LogFactory.getLog(RetriableFileCopyCommand.class);
   private static int BUFFER_SIZE = 8 * 1024;
-
+  private boolean skipCrc = false;
+  
   /**
    * Constructor, taking a description of the action.
    * @param description Verbose description of the copy operation.
@@ -49,6 +50,17 @@ public class RetriableFileCopyCommand ex
   public RetriableFileCopyCommand(String description) {
     super(description);
   }
+ 
+  /**
+   * Create a RetriableFileCopyCommand.
+   *
+   * @param skipCrc Whether to skip the crc check.
+   * @param description A verbose description of the copy operation.
+   */
+  public RetriableFileCopyCommand(boolean skipCrc, String description) {
+    this(description);
+    this.skipCrc = skipCrc;
+  }
 
   /**
    * Implementation of RetriableCommand::doExecute().
@@ -91,7 +103,10 @@ public class RetriableFileCopyCommand ex
                                      context, fileAttributes);
 
       compareFileLengths(sourceFileStatus, tmpTargetPath, configuration, bytesRead);
-      compareCheckSums(sourceFS, sourceFileStatus.getPath(), targetFS, tmpTargetPath);
+      //At this point, src&dest lengths are same. if length==0, we skip checksum
+      if ((bytesRead != 0) && (!skipCrc)) {
+        compareCheckSums(sourceFS, sourceFileStatus.getPath(), targetFS, tmpTargetPath);
+      }
       promoteTmpToTarget(tmpTargetPath, target, targetFS);
       return bytesRead;
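
A sketch of the resulting behavior (illustrative only; the -skipcrccheck
switch name and the namenode URIs are assumptions, not part of this hunk):

  # with CRC verification skipped, compareCheckSums() is no longer invoked
  # after the file lengths match; zero-length copies skip it as well:
  $ hadoop distcp -update -skipcrccheck hdfs://nn1:8020/src hdfs://nn2:8020/dst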
 

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestIntegration.java Fri Oct 19 02:25:55 2012
@@ -21,8 +21,11 @@ package org.apache.hadoop.tools;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.Cluster;
+import org.apache.hadoop.mapreduce.JobSubmissionFiles;
 import org.apache.hadoop.tools.util.TestDistCpUtils;
 import org.junit.Assert;
 import org.junit.BeforeClass;
@@ -30,6 +33,8 @@ import org.junit.Test;
 
 import java.io.IOException;
 import java.io.OutputStream;
+import java.util.ArrayList;
+import java.util.List;
 
 public class TestIntegration {
   private static final Log LOG = LogFactory.getLog(TestIntegration.class);
@@ -317,6 +322,58 @@ public class TestIntegration {
       TestDistCpUtils.delete(fs, root);
     }
   }
+  
+  @Test
+  public void testDeleteMissingInDestination() {
+    
+    try {
+      addEntries(listFile, "srcdir");
+      createFiles("srcdir/file1", "dstdir/file1", "dstdir/file2");
+      
+      Path target = new Path(root + "/dstdir");
+      runTest(listFile, target, true, true, false);
+      
+      checkResult(target, 1, "file1");
+    } catch (IOException e) {
+      LOG.error("Exception encountered while running distcp", e);
+      Assert.fail("distcp failure");
+    } finally {
+      TestDistCpUtils.delete(fs, root);
+      TestDistCpUtils.delete(fs, "target/tmp1");
+    }
+  }
+  
+  @Test
+  public void testOverwrite() {
+    byte[] contents1 = "contents1".getBytes();
+    byte[] contents2 = "contents2".getBytes();
+    Assert.assertEquals(contents1.length, contents2.length);
+    
+    try {
+      addEntries(listFile, "srcdir");
+      createWithContents("srcdir/file1", contents1);
+      createWithContents("dstdir/file1", contents2);
+      
+      Path target = new Path(root + "/dstdir");
+      runTest(listFile, target, false, false, true);
+      
+      checkResult(target, 1, "file1");
+      
+      // make sure dstdir/file1 has been overwritten with the contents
+      // of srcdir/file1
+      FSDataInputStream is = fs.open(new Path(root + "/dstdir/file1"));
+      byte[] dstContents = new byte[contents1.length];
+      is.readFully(dstContents);
+      is.close();
+      Assert.assertArrayEquals(contents1, dstContents);
+    } catch (IOException e) {
+      LOG.error("Exception encountered while running distcp", e);
+      Assert.fail("distcp failure");
+    } finally {
+      TestDistCpUtils.delete(fs, root);
+      TestDistCpUtils.delete(fs, "target/tmp1");
+    }
+  }
 
   @Test
   public void testGlobTargetMissingSingleLevel() {
@@ -410,7 +467,33 @@ public class TestIntegration {
       TestDistCpUtils.delete(fs, "target/tmp1");
     }
   }
+  
+  @Test
+  public void testCleanup() {
+    try {
+      Path sourcePath = new Path("noscheme:///file");
+      List<Path> sources = new ArrayList<Path>();
+      sources.add(sourcePath);
+
+      DistCpOptions options = new DistCpOptions(sources, target);
+
+      Configuration conf = getConf();
+      Path stagingDir = JobSubmissionFiles.getStagingDir(
+              new Cluster(conf), conf);
+      stagingDir.getFileSystem(conf).mkdirs(stagingDir);
 
+      try {
+        new DistCp(conf, options).execute();
+      } catch (Throwable t) {
+        Assert.assertEquals(stagingDir.getFileSystem(conf).
+            listStatus(stagingDir).length, 0);
+      }
+    } catch (Exception e) {
+      LOG.error("Exception encountered ", e);
+      Assert.fail("testCleanup failed " + e.getMessage());
+    }
+  }
+  
   private void addEntries(Path listFile, String... entries) throws IOException {
     OutputStream out = fs.create(listFile);
     try {
@@ -434,16 +517,32 @@ public class TestIntegration {
       }
     }
   }
+  
+  private void createWithContents(String entry, byte[] contents) throws IOException {
+    OutputStream out = fs.create(new Path(root + "/" + entry));
+    try {
+      out.write(contents);
+    } finally {
+      out.close();
+    }
+  }
 
   private void mkdirs(String... entries) throws IOException {
     for (String entry : entries){
       fs.mkdirs(new Path(entry));
     }
   }
-
+    
   private void runTest(Path listFile, Path target, boolean sync) throws IOException {
+    runTest(listFile, target, sync, false, false);
+  }
+  
+  private void runTest(Path listFile, Path target, boolean sync, boolean delete,
+      boolean overwrite) throws IOException {
     DistCpOptions options = new DistCpOptions(listFile, target);
     options.setSyncFolder(sync);
+    options.setDeleteMissing(delete);
+    options.setOverwrite(overwrite);
     try {
       new DistCp(getConf(), options).execute();
     } catch (Exception e) {

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-distcp/src/test/java/org/apache/hadoop/tools/TestOptionsParser.java Fri Oct 19 02:25:55 2012
@@ -110,6 +110,24 @@ public class TestOptionsParser {
         "hdfs://localhost:8020/target/"});
     Assert.assertEquals(options.getMapBandwidth(), 11);
   }
+  
+  @Test(expected=IllegalArgumentException.class)
+  public void testParseNonPositiveBandwidth() {
+    OptionsParser.parse(new String[] {
+        "-bandwidth",
+        "-11",
+        "hdfs://localhost:8020/source/first",
+        "hdfs://localhost:8020/target/"});
+  }
+  
+  @Test(expected=IllegalArgumentException.class)
+  public void testParseZeroBandwidth() {
+    OptionsParser.parse(new String[] {
+        "-bandwidth",
+        "0",
+        "hdfs://localhost:8020/source/first",
+        "hdfs://localhost:8020/target/"});
+  }
 
   @Test
   public void testParseSkipCRC() {

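The two new parser tests pin down the contract that -bandwidth must be strictly positive, with both the zero and the negative case expected to throw IllegalArgumentException. A sketch of the kind of guard this implies on the options side (illustrative only; the committed validation code is not shown in this diff):

    // Illustrative guard only, not copied from the commit.
    void setMapBandwidth(int mapBandwidth) {
      if (mapBandwidth <= 0) {
        throw new IllegalArgumentException(
            "Bandwidth " + mapBandwidth + " is invalid; it must be positive");
      }
      this.mapBandwidth = mapBandwidth;
    }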
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestCopyFiles.java Fri Oct 19 02:25:55 2012
@@ -61,9 +61,9 @@ import org.junit.Ignore;
 public class TestCopyFiles extends TestCase {
   {
     ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-        ).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+        ).getLogger().setLevel(Level.ERROR);
+    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
     ((Log4JLogger)DistCpV1.LOG).getLogger().setLevel(Level.ALL);
   }
   

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-extras/src/test/java/org/apache/hadoop/tools/TestDistCh.java Fri Oct 19 02:25:55 2012
@@ -46,9 +46,9 @@ import org.junit.Ignore;
 public class TestDistCh extends junit.framework.TestCase {
   {
     ((Log4JLogger)LogFactory.getLog("org.apache.hadoop.hdfs.StateChange")
-        ).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.OFF);
-    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.OFF);
+        ).getLogger().setLevel(Level.ERROR);
+    ((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ERROR);
+    ((Log4JLogger)LogFactory.getLog(FSNamesystem.class)).getLogger().setLevel(Level.ERROR);
   }
 
   static final Long RANDOM_NUMBER_GENERATOR_SEED = null;

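In both TestCopyFiles and TestDistCh the noisy HDFS loggers are raised from Level.OFF to Level.ERROR rather than silenced outright, so genuine errors from the StateChange log, the DataNode and FSNamesystem still surface in the test output while lower-level chatter stays suppressed.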
Propchange: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-gridmix/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Fri Oct 19 02:25:55 2012
@@ -1 +1,4 @@
 target
+.classpath
+.project
+.settings

Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/pom.xml Fri Oct 19 02:25:55 2012
@@ -40,38 +40,23 @@
       <build>
         <plugins>
           <plugin>
-            <groupId>org.apache.maven.plugins</groupId>
-            <artifactId>maven-antrun-plugin</artifactId>
+            <groupId>org.apache.hadoop.cmake.maven.ng</groupId>
+            <artifactId>cmake-ng</artifactId>
             <executions>
               <execution>
-                <id>make</id>
-                <phase>compile</phase>
-                <goals><goal>run</goal></goals>
+                <id>cmake-compile</id>
+                <goals><goal>compile</goal></goals>
                 <configuration>
-                  <target>
-                    <mkdir dir="${project.build.directory}/native"/>
-                    <exec executable="cmake" dir="${project.build.directory}/native" 
-                        failonerror="true">
-                      <arg line="${basedir}/src/ -DJVM_ARCH_DATA_MODEL=${sun.arch.data.model}"/>
-                    </exec>
-                    <exec executable="make" dir="${project.build.directory}/native" failonerror="true">
-                      <arg line="VERBOSE=1"/>
-                    </exec>
-                  </target>
+                  <target>all</target>
+                  <source>${basedir}/src</source>
+                  <vars>
+                    <JVM_ARCH_DATA_MODEL>${sun.arch.data.model}</JVM_ARCH_DATA_MODEL>
+                  </vars>
+                  <env>
+                    <CFLAGS>${container-executor.additional_cflags}</CFLAGS>
+                  </env>
                 </configuration>
               </execution>
-              <!-- TODO wire here native testcases
-              <execution>
-                <id>test</id>
-                <phase>test</phase>
-                <goals>
-                  <goal>test</goal>
-                </goals>
-                <configuration>
-                  <destDir>${project.build.directory}/native/target</destDir>
-                </configuration>
-              </execution>
-              -->
             </executions>
           </plugin>
         </plugins>

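The hadoop-pipes build switches from an inline maven-antrun exec of cmake and make to the new in-tree cmake-ng plugin (org.apache.hadoop.cmake.maven.ng), with JVM_ARCH_DATA_MODEL passed as a CMake variable and CFLAGS through the environment. In essence the plugin automates the two-step cmake/make invocation the old ant target spelled out; a rough, illustrative Java sketch of that pattern (not the plugin's actual code):

    import java.io.File;
    import java.io.IOException;
    import java.util.Arrays;

    public class CMakeBuildSketch {
      static void run(File buildDir, String... cmd)
          throws IOException, InterruptedException {
        ProcessBuilder pb = new ProcessBuilder(cmd)
            .directory(buildDir)
            .inheritIO();  // stream cmake/make output to the console
        if (pb.start().waitFor() != 0) {
          throw new IOException("Command failed: " + Arrays.toString(cmd));
        }
      }

      public static void main(String[] args) throws Exception {
        File buildDir = new File("target/native");
        buildDir.mkdirs();
        // Mirrors the <vars> block: forward the JVM's data model to CMake.
        run(buildDir, "cmake", new File("src").getAbsolutePath(),
            "-DJVM_ARCH_DATA_MODEL=" + System.getProperty("sun.arch.data.model"));
        run(buildDir, "make", "VERBOSE=1");
      }
    }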
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/src/CMakeLists.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/src/CMakeLists.txt?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/src/CMakeLists.txt (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-pipes/src/CMakeLists.txt Fri Oct 19 02:25:55 2012
@@ -26,17 +26,7 @@ set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} 
 set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
 set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D_REENTRANT -D_FILE_OFFSET_BITS=64")
 
-if (JVM_ARCH_DATA_MODEL EQUAL 32)
-    # force 32-bit code generation on amd64/x86_64, ppc64, sparc64
-    if (CMAKE_COMPILER_IS_GNUCC AND CMAKE_SYSTEM_PROCESSOR MATCHES ".*64")
-        set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -m32")
-        set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -m32")
-        set(CMAKE_LD_FLAGS "${CMAKE_LD_FLAGS} -m32")
-    endif ()
-    if (CMAKE_SYSTEM_PROCESSOR STREQUAL "x86_64" OR CMAKE_SYSTEM_PROCESSOR STREQUAL "amd64")
-        set(CMAKE_SYSTEM_PROCESSOR "i686")
-    endif ()
-endif (JVM_ARCH_DATA_MODEL EQUAL 32)
+include(../../../hadoop-common-project/hadoop-common/src/JNIFlags.cmake NO_POLICY_SCOPE)
 
 function(output_directory TGT DIR)
     SET_TARGET_PROPERTIES(${TGT} PROPERTIES
@@ -80,7 +70,6 @@ add_library(hadooppipes STATIC
     main/native/pipes/impl/HadoopPipes.cc
 )
 target_link_libraries(hadooppipes
-    ${JAVA_JVM_LIBRARY}
     ${OPENSSL_LIBRARIES}
     pthread
 )

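The CMakeLists change drops the hand-rolled 32-bit flag handling in favour of the shared JNIFlags.cmake from hadoop-common, and stops linking the static hadooppipes library against libjvm (it now links only the OpenSSL libraries and pthread). The JVM_ARCH_DATA_MODEL value that this logic keys off ultimately comes from a standard HotSpot system property, which Maven interpolates as ${sun.arch.data.model}:

    public class ArchDataModel {
      public static void main(String[] args) {
        // Prints "32" or "64" on HotSpot JVMs; this is the value the pom
        // forwards to CMake as JVM_ARCH_DATA_MODEL.
        System.out.println(System.getProperty("sun.arch.data.model"));
      }
    }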
Modified: hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java (original)
+++ hadoop/common/branches/HDFS-2802/hadoop-tools/hadoop-streaming/src/main/java/org/apache/hadoop/streaming/StreamJob.java Fri Oct 19 02:25:55 2012
@@ -958,7 +958,6 @@ public class StreamJob implements Tool {
       if (!b)
         fail(LINK_URI);
     }
-    DistributedCache.createSymlink(jobConf_);
     // set the jobconf for the caching parameters
     if (cacheArchives != null)
       DistributedCache.setCacheArchives(archiveURIs, jobConf_);

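Dropping DistributedCache.createSymlink here reflects that on MR2 the distributed cache always symlinks localized files into the task working directory, so the explicit call had become a deprecated no-op. Cache entries are still registered the same way; a minimal sketch (the URI is a placeholder):

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.filecache.DistributedCache;

    public class CacheFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // The '#lookup' fragment names the symlink in the task working dir.
        DistributedCache.addCacheFile(
            new URI("hdfs:///apps/lookup.txt#lookup"), conf);
        // No DistributedCache.createSymlink(conf) call needed any more.
      }
    }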
Modified: hadoop/common/branches/HDFS-2802/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-2802/pom.xml?rev=1399950&r1=1399949&r2=1399950&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-2802/pom.xml (original)
+++ hadoop/common/branches/HDFS-2802/pom.xml Fri Oct 19 02:25:55 2012
@@ -23,6 +23,17 @@ xsi:schemaLocation="http://maven.apache.
   <name>Apache Hadoop Main</name>
   <packaging>pom</packaging>
 
+  <dependencyManagement>
+    <dependencies>
+      <dependency>
+        <groupId>com.cenqua.clover</groupId>
+        <artifactId>clover</artifactId>
+        <!-- Use the version needed by maven-clover-plugin -->
+        <version>3.0.2</version>
+      </dependency>
+    </dependencies>
+  </dependencyManagement>
+
   <distributionManagement>
     <repository>
       <id>apache.staging.https</id>
@@ -73,11 +84,13 @@ xsi:schemaLocation="http://maven.apache.
   </properties>
 
   <modules>
+    <module>dev-support</module>
     <module>hadoop-project</module>
     <module>hadoop-project-dist</module>
     <module>hadoop-assemblies</module>
     <module>hadoop-common-project</module>
     <module>hadoop-hdfs-project</module>
+    <module>hadoop-yarn-project</module>
     <module>hadoop-mapreduce-project</module>
     <module>hadoop-tools</module>
     <module>hadoop-dist</module>
@@ -242,6 +255,11 @@ xsi:schemaLocation="http://maven.apache.
             </lifecycleMappingMetadata>
           </configuration>
         </plugin>
+        <plugin>
+          <groupId>com.atlassian.maven.plugins</groupId>
+          <artifactId>maven-clover2-plugin</artifactId>
+          <version>3.0.5</version>
+        </plugin>
       </plugins>
     </pluginManagement>
 
@@ -364,6 +382,18 @@ xsi:schemaLocation="http://maven.apache.
         </reportSets>
       </plugin>
 
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-dependency-plugin</artifactId>
+        <version>2.4</version>
+        <reportSets>
+          <reportSet>
+            <reports>
+              <report>analyze-report</report>
+            </reports>
+          </reportSet>
+        </reportSets>
+      </plugin>
     </plugins>
   </reporting>
   
@@ -470,6 +500,52 @@ xsi:schemaLocation="http://maven.apache.
         </plugins>
       </build>
     </profile>
-
+    <profile>
+      <id>clover</id>
+      <activation>
+        <activeByDefault>false</activeByDefault>
+        <property>
+          <name>clover</name>
+        </property>
+      </activation>
+      <properties>
+        <cloverLicenseLocation>${user.home}/.clover.license</cloverLicenseLocation>
+        <cloverDatabase>${project.build.directory}/clover/hadoop-coverage.db</cloverDatabase>
+      </properties>
+      <build>
+        <plugins>
+          <plugin>
+            <groupId>com.atlassian.maven.plugins</groupId>
+            <artifactId>maven-clover2-plugin</artifactId>
+            <configuration>
+              <includesAllSourceRoots>true</includesAllSourceRoots>
+              <includesTestSourceRoots>true</includesTestSourceRoots>
+              <licenseLocation>${cloverLicenseLocation}</licenseLocation>
+              <cloverDatabase>${cloverDatabase}</cloverDatabase>
+              <targetPercentage>50%</targetPercentage>
+              <outputDirectory>${project.build.directory}/clover</outputDirectory>
+              <generateHtml>true</generateHtml>
+              <generateXml>true</generateXml>
+            </configuration>
+            <executions>
+              <execution>
+                <id>clover-setup</id>
+                <phase>process-sources</phase>
+                <goals>
+                  <goal>setup</goal>
+                </goals>
+              </execution>
+              <execution>
+                <id>clover</id>
+                <phase>test</phase>
+                <goals>
+                  <goal>clover</goal>
+                </goals>
+              </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
   </profiles>
 </project>
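
Usage note on the new clover profile above: it is inactive by default and keyed to the clover property, so it can be enabled per run with -Pclover (or by defining -Dclover). It expects a license at ${user.home}/.clover.license, instruments main and test source roots, and writes the HTML and XML coverage reports under target/clover with a 50% target percentage.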