You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by at...@apache.org on 2011/09/14 00:49:37 UTC

svn commit: r1170378 - in /hadoop/common/branches/HDFS-1623: ./ dev-support/smart-apply-patch.sh dev-support/test-patch.sh hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

Author: atm
Date: Tue Sep 13 22:49:27 2011
New Revision: 1170378

URL: http://svn.apache.org/viewvc?rev=1170378&view=rev
Log:
Merge trunk into HA branch

Modified:
    hadoop/common/branches/HDFS-1623/   (props changed)
    hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh
    hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml

Propchange: hadoop/common/branches/HDFS-1623/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Sep 13 22:49:27 2011
@@ -1 +1 @@
-/hadoop/common/trunk:1152502-1166484
+/hadoop/common/trunk:1152502-1170371

Modified: hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh Tue Sep 13 22:49:27 2011
@@ -39,40 +39,68 @@ fi
 # Come up with a list of changed files into $TMP
 TMP=/tmp/tmp.paths.$$
 TOCLEAN="$TOCLEAN $TMP"
-grep '^+++\|^---' $PATCH_FILE | cut -c '5-' | grep -v /dev/null | sort | uniq > $TMP
 
-# Assume p0 to start
-PLEVEL=0
-
-# if all of the lines start with a/ or b/, then this is a git patch that
-# was generated without --no-prefix
-if ! grep -qv '^a/\|^b/' $TMP ; then
-  echo Looks like this is a git patch. Stripping a/ and b/ prefixes
-  echo and incrementing PLEVEL
-  PLEVEL=$[$PLEVEL + 1]
-  sed -i -e 's,^[ab]/,,' $TMP
-fi
-
-PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
-
-# if we are at the project root then nothing more to do
-if [[ -d hadoop-common-project ]]; then
-  echo Looks like this is being run at project root
-
-# if all of the lines start with hadoop-common/, hadoop-hdfs/, or hadoop-mapreduce/, this is
-# relative to the hadoop root instead of the subproject root, so we need
-# to chop off another layer
-elif [[ "$PREFIX_DIRS" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
-
-  echo Looks like this is relative to project root. Increasing PLEVEL
-  PLEVEL=$[$PLEVEL + 1]
-
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
-  echo Looks like this is a cross-subproject patch. Try applying from the project root
-  exit 1
+if $PATCH -p0 -E --dry-run < $PATCH_FILE 2>&1 > $TMP; then
+  PLEVEL=0
+  #if the patch applied at P0 there is the possibility that all we are doing
+  # is adding new files and they would apply anywhere. So try to guess the
+  # correct place to put those files.
+
+  TMP2=/tmp/tmp.paths.2.$$
+  TOCLEAN="$TOCLEAN $TMP2"
+
+  grep '^patching file ' $TMP | awk '{print $3}' | grep -v /dev/null | sort | uniq > $TMP2
+
+  #first off check that all of the files do not exist
+  FOUND_ANY=0
+  for CHECK_FILE in $(cat $TMP2)
+  do
+    if [[ -f $CHECK_FILE ]]; then
+      FOUND_ANY=1
+    fi
+  done
+
+  if [[ "$FOUND_ANY" = "0" ]]; then
+    #all of the files are new files so we have to guess where the correct place to put them is.
+
+    # if all of the lines start with a/ or b/, then this is a git patch that
+    # was generated without --no-prefix
+    if ! grep -qv '^a/\|^b/' $TMP2 ; then
+      echo Looks like this is a git patch. Stripping a/ and b/ prefixes
+      echo and incrementing PLEVEL
+      PLEVEL=$[$PLEVEL + 1]
+      sed -i -e 's,^[ab]/,,' $TMP2
+    fi
+
+    PREFIX_DIRS_AND_FILES=$(cut -d '/' -f 1 $TMP2 | sort | uniq)
+
+    # if we are at the project root then nothing more to do
+    if [[ -d hadoop-common-project ]]; then
+      echo Looks like this is being run at project root
+
+    # if all of the lines start with hadoop-common/, hadoop-hdfs/, or hadoop-mapreduce/, this is
+    # relative to the hadoop root instead of the subproject root, so we need
+    # to chop off another layer
+    elif [[ "$PREFIX_DIRS_AND_FILES" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
+
+      echo Looks like this is relative to project root. Increasing PLEVEL
+      PLEVEL=$[$PLEVEL + 1]
+
+    elif ! echo "$PREFIX_DIRS_AND_FILES" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
+      echo Looks like this is a cross-subproject patch. Try applying from the project root
+      cleanup 1
+    fi
+  fi
+elif $PATCH -p1 -E --dry-run < $PATCH_FILE 2>&1 > /dev/null; then
+  PLEVEL=1
+elif $PATCH -p2 -E --dry-run < $PATCH_FILE 2>&1 > /dev/null; then
+  PLEVEL=2
+else
+  echo "The patch does not appear to apply with p0 to p2";
+  cleanup 1;
 fi
 
 echo Going to apply patch with: $PATCH -p$PLEVEL
 $PATCH -p$PLEVEL -E < $PATCH_FILE
 
-cleanup 0
+cleanup $?

Modified: hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh Tue Sep 13 22:49:27 2011
@@ -64,6 +64,7 @@ printUsage() {
   echo "--findbugs-home=<path> Findbugs home directory (default FINDBUGS_HOME environment variable)"
   echo "--forrest-home=<path>  Forrest home directory (default FORREST_HOME environment variable)"
   echo "--dirty-workspace      Allow the local SVN workspace to have uncommitted changes"
+  echo "--run-tests            Run all tests below the base directory"
   echo
   echo "Jenkins-only options:"
   echo "--jenkins              Run by Jenkins (runs tests and posts results to JIRA)"
@@ -130,6 +131,9 @@ parseArgs() {
     --dirty-workspace)
       DIRTY_WORKSPACE=true
       ;;
+    --run-tests)
+      RUN_TESTS=true
+      ;;
     *)
       PATCH_OR_DEFECT=$i
       ;;
@@ -249,6 +253,18 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
+  if [[ ! -d hadoop-common-project ]]; then
+    cd $bindir/..
+    echo "Compiling $(pwd)"
+    echo "$MVN clean test -DskipTests > $PATCH_DIR/trunkCompile.txt 2>&1"
+    $MVN clean test -DskipTests > $PATCH_DIR/trunkCompile.txt 2>&1
+    if [[ $? != 0 ]] ; then
+      echo "Top-level trunk compilation is broken?"
+      cleanupAndExit 1
+    fi
+    cd -
+  fi
+  echo "Compiling $(pwd)"
   echo "$MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
   $MVN clean test -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
@@ -550,8 +566,10 @@ checkFindbugsWarnings () {
     $FINDBUGS_HOME/bin/convertXmlToText -html \
       $PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.xml \
       $PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.html
-    JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/patchprocess/newPatchFindbugsWarnings${module_suffix}.html
+    if [[ $newFindbugsWarnings > 0 ]] ; then
+      JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/$(basename $BASEDIR)/patchprocess/newPatchFindbugsWarnings${module_suffix}.html
 $JIRA_COMMENT_FOOTER"
+    fi
   done
 
   ### if current warnings greater than OK_FINDBUGS_WARNINGS
@@ -580,26 +598,12 @@ runTests () {
   echo ""
   echo ""
   
-  failed_tests=""
-  modules=$(findModules)
-  for module in $modules;
-  do
-    pushd $module
-      echo "    Running tests in $module"
-      ### Kill any rogue build processes from the last attempt
-      $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
-
-      echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
-      $MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
-      if [[ $? != 0 ]] ; then
-        ### Find and format names of failed tests
-        module_failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
-        failed_tests="${failed_tests}
-${module_failed_tests}"
-      fi
-    popd
-  done
-  echo $failed_tests
+  echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
+  $MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
+  if [[ $? != 0 ]] ; then
+    ### Find and format names of failed tests
+    failed_tests=`find . -name 'TEST*.xml' | xargs $GREP  -l -E "<failure|<error" | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
+  fi
   
   if [[ -n "$failed_tests" ]] ; then
   
@@ -616,36 +620,6 @@ $failed_tests"
 }
 
 ###############################################################################
-### Find the modules changed by the patch
-
-findModules () {
-  # Come up with a list of changed files into $TMP
-  TMP=/tmp/tmp.paths.$$
-  $GREP '^+++\|^---' $PATCH_DIR/patch | cut -c '5-' | $GREP -v /dev/null | sort | uniq > $TMP
-
-  # if all of the lines start with a/ or b/, then this is a git patch that
-  # was generated without --no-prefix
-  if ! $GREP -qv '^a/\|^b/' $TMP ; then
-    sed -i -e 's,^[ab]/,,' $TMP
-  fi
-
-  PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
-
-  # if all of the lines start with hadoop-common-project/, hadoop-hdfs-project/, or hadoop-mapreduce-project/, this is
-  # relative to the hadoop root instead of the subproject root
-  if [[ "$PREFIX_DIRS" =~ ^(hadoop-common-project|hadoop-hdfs-project|hadoop-mapreduce-project)$ ]]; then
-    echo $PREFIX_DIRS
-    return 0
-  elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common-project\|hadoop-hdfs-project\|hadoop-mapreduce-project' ; then
-    echo $PREFIX_DIRS
-    return 0
-  fi
-  
-  # No modules found. Running from current directory.
-  echo .
-}
-
-###############################################################################
 ### Run the test-contrib target
 runContribTests () {
   echo ""
@@ -820,8 +794,8 @@ checkFindbugsWarnings
 (( RESULT = RESULT + $? ))
 checkReleaseAuditWarnings
 (( RESULT = RESULT + $? ))
-### Do not call these when run by a developer 
-if [[ $JENKINS == "true" ]] ; then
+### Run tests for Jenkins or if explicitly asked for by a developer
+if [[ $JENKINS == "true" || $RUN_TESTS == "true" ]] ; then
   runTests
   (( RESULT = RESULT + $? ))
   runContribTests

Modified: hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-assemblies/src/main/resources/assemblies/hadoop-dist.xml Tue Sep 13 22:49:27 2011
@@ -79,6 +79,13 @@
       </includes>
     </fileSet>
     <fileSet>
+      <directory>${basedir}/src/main/packages/templates/conf</directory>
+      <outputDirectory>/share/hadoop/${hadoop.component}/templates/conf</outputDirectory>
+      <includes>
+        <include>*</include>
+      </includes>
+    </fileSet>
+    <fileSet>
       <directory>${project.build.directory}</directory>
       <outputDirectory>/share/hadoop/${hadoop.component}</outputDirectory>
       <includes>