Posted to common-commits@hadoop.apache.org by to...@apache.org on 2011/08/16 02:37:23 UTC

svn commit: r1158072 - in /hadoop/common/branches/HDFS-1623: ./ dev-support/ hadoop-annotations/ hadoop-assemblies/ hadoop-common/ hadoop-common/dev-support/ hadoop-common/src/main/docs/ hadoop-common/src/main/java/ hadoop-common/src/main/java/org/apac...

Author: todd
Date: Tue Aug 16 00:37:15 2011
New Revision: 1158072

URL: http://svn.apache.org/viewvc?rev=1158072&view=rev
Log:
Merge trunk into HDFS-1623 branch.

Added:
    hadoop/common/branches/HDFS-1623/hadoop-common/dev-support/test-patch.properties
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/dev-support/test-patch.properties
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/java/org/apache/hadoop/util/NativeCrc32.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/
      - copied from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/NativeCrc32.c
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.c
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/bulk_crc32.h
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/crc32_zlib_polynomial_tables.h
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/crc32_zlib_polynomial_tables.h
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/crc32c_tables.h
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/crc32c_tables.h
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h
      - copied unchanged from r1158071, hadoop/common/trunk/hadoop-common/src/main/native/src/org/apache/hadoop/util/gcc_optimizations.h
Removed:
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/CyclicIteration.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestCyclicIteration.java
Modified:
    hadoop/common/branches/HDFS-1623/   (props changed)
    hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh
    hadoop/common/branches/HDFS-1623/dev-support/test-patch.properties
    hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh
    hadoop/common/branches/HDFS-1623/hadoop-annotations/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-assemblies/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-assemblies/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt   (contents, props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/LICENSE.txt
    hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/docs/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/Makefile.am
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/core/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
    hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java
    hadoop/common/branches/HDFS-1623/hadoop-project/   (props changed)
    hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml
    hadoop/common/branches/HDFS-1623/pom.xml

Propchange: hadoop/common/branches/HDFS-1623/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Aug 16 00:37:15 2011
@@ -0,0 +1,5 @@
+.classpath
+.git
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1 +1 @@
-/hadoop/common/trunk:1152502-1153927
+/hadoop/common/trunk:1152502-1158071

Modified: hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/smart-apply-patch.sh Tue Aug 16 00:37:15 2011
@@ -53,16 +53,22 @@ if ! grep -qv '^a/\|^b/' $TMP ; then
   sed -i -e 's,^[ab]/,,' $TMP
 fi
 
-# if all of the lines start with common/, hdfs/, or mapreduce/, this is
+PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
+
+# if we are at the project root then nothing more to do
+if [[ -d hadoop-common ]]; then
+  echo Looks like this is being run at project root
+
+# if all of the lines start with hadoop-common/, hdfs/, or mapreduce/, this is
 # relative to the hadoop root instead of the subproject root, so we need
 # to chop off another layer
-PREFIX_DIRS=$(cut -d '/' -f 1 $TMP | sort | uniq)
-if [[ "$PREFIX_DIRS" =~ ^(hdfs|common|mapreduce)$ ]]; then
+elif [[ "$PREFIX_DIRS" =~ ^(hdfs|hadoop-common|mapreduce)$ ]]; then
 
   echo Looks like this is relative to project root. Increasing PLEVEL
   PLEVEL=$[$PLEVEL + 1]
-elif ! echo "$PREFIX_DIRS" | grep -vxq 'common\|hdfs\|mapreduce' ; then
-  echo Looks like this is a cross-subproject patch. Not supported!
+
+elif ! echo "$PREFIX_DIRS" | grep -vxq 'hadoop-common\|hdfs\|mapreduce' ; then
+  echo Looks like this is a cross-subproject patch. Try applying from the project root
   exit 1
 fi
 

Modified: hadoop/common/branches/HDFS-1623/dev-support/test-patch.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/test-patch.properties?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/test-patch.properties (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/test-patch.properties Tue Aug 16 00:37:15 2011
@@ -13,6 +13,9 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+# The number of acceptable warnings for *all* modules
+# Please update the per-module test-patch.properties if you update this file.
+
 OK_RELEASEAUDIT_WARNINGS=0
 OK_FINDBUGS_WARNINGS=0
-OK_JAVADOC_WARNINGS=0
+OK_JAVADOC_WARNINGS=6

Modified: hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh (original)
+++ hadoop/common/branches/HDFS-1623/dev-support/test-patch.sh Tue Aug 16 00:37:15 2011
@@ -19,84 +19,152 @@ ulimit -n 1024
 ### SVN_REVISION and BUILD_URL are set by Hudson if it is run by patch process
 ### Read variables from properties file
 bindir=$(dirname $0)
-. $bindir/test-patch.properties
 
-###############################################################################
-parseArgs() {
-  case "$1" in
-    HUDSON)
-      ### Set HUDSON to true to indicate that this script is being run by Hudson
-      HUDSON=true
-      if [[ $# != 16 ]] ; then
-        echo "ERROR: usage $0 HUDSON <PATCH_DIR> <SUPPORT_DIR> <PS_CMD> <WGET_CMD> <JIRACLI> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <ECLIPSE_HOME> <WORKSPACE_BASEDIR> <JIRA_PASSWD> <CURL_CMD> <DEFECT> "
-        cleanupAndExit 0
-      fi
-      PATCH_DIR=$2
-      SUPPORT_DIR=$3
-      PS=$4
-      WGET=$5
-      JIRACLI=$6
-      SVN=$7
-      GREP=$8
-      PATCH=$9
-      FINDBUGS_HOME=${10}
-      FORREST_HOME=${11}
-      ECLIPSE_HOME=${12}
-      BASEDIR=${13}
-      JIRA_PASSWD=${14}
-      CURL=${15}
-      defect=${16}
-		
-      ### Retrieve the defect number
-      if [ -z "$defect" ] ; then
-        echo "Could not determine the patch to test.  Exiting."
-        cleanupAndExit 0
-      fi
+# Defaults
+if [ -z "$MAVEN_HOME" ]; then
+  MVN=mvn
+else
+  MVN=$MAVEN_HOME/bin/mvn
+fi
 
-      if [ ! -e "$PATCH_DIR" ] ; then
-        mkdir -p $PATCH_DIR 
-      fi
+PROJECT_NAME=Hadoop
+JENKINS=false
+PATCH_DIR=/tmp
+SUPPORT_DIR=/tmp
+BASEDIR=$(pwd)
+
+PS=${PS:-ps}
+AWK=${AWK:-awk}
+WGET=${WGET:-wget}
+SVN=${SVN:-svn}
+GREP=${GREP:-grep}
+PATCH=${PATCH:-patch}
+JIRACLI=${JIRA:-jira}
+FINDBUGS_HOME=${FINDBUGS_HOME}
+FORREST_HOME=${FORREST_HOME}
+ECLIPSE_HOME=${ECLIPSE_HOME}
+
+###############################################################################
+printUsage() {
+  echo "Usage: $0 [options] patch-file | defect-number"
+  echo
+  echo "Where:"
+  echo "  patch-file is a local patch file containing the changes to test"
+  echo "  defect-number is a JIRA defect number (e.g. 'HADOOP-1234') to test (Jenkins only)"
+  echo
+  echo "Options:"
+  echo "--patch-dir=<dir>      The directory for working and output files (default '/tmp')"
+  echo "--basedir=<dir>        The directory to apply the patch to (default current directory)"
+  echo "--mvn-cmd=<cmd>        The 'mvn' command to use (default \$MAVEN_HOME/bin/mvn, or 'mvn')"
+  echo "--ps-cmd=<cmd>         The 'ps' command to use (default 'ps')"
+  echo "--awk-cmd=<cmd>        The 'awk' command to use (default 'awk')"
+  echo "--svn-cmd=<cmd>        The 'svn' command to use (default 'svn')"
+  echo "--grep-cmd=<cmd>       The 'grep' command to use (default 'grep')"
+  echo "--patch-cmd=<cmd>      The 'patch' command to use (default 'patch')"
+  echo "--findbugs-home=<path> Findbugs home directory (default FINDBUGS_HOME environment variable)"
+  echo "--forrest-home=<path>  Forrest home directory (default FORREST_HOME environment variable)"
+  echo "--dirty-workspace      Allow the local SVN workspace to have uncommitted changes"
+  echo
+  echo "Jenkins-only options:"
+  echo "--jenkins              Run by Jenkins (runs tests and posts results to JIRA)"
+  echo "--support-dir=<dir>    The directory to find support files in"
+  echo "--wget-cmd=<cmd>       The 'wget' command to use (default 'wget')"
+  echo "--jira-cmd=<cmd>       The 'jira' command to use (default 'jira')"
+  echo "--jira-password=<pw>   The password for the 'jira' command"
+  echo "--eclipse-home=<path>  Eclipse home directory (default ECLIPSE_HOME environment variable)"
+}
 
-      ECLIPSE_PROPERTY="-Declipse.home=$ECLIPSE_HOME"
+###############################################################################
+parseArgs() {
+  for i in $*
+  do
+    case $i in
+    --jenkins)
+      JENKINS=true
       ;;
-    DEVELOPER)
-      ### Set HUDSON to false to indicate that this script is being run by a developer
-      HUDSON=false
-      if [[ $# != 9 ]] ; then
-        echo "ERROR: usage $0 DEVELOPER <PATCH_FILE> <SCRATCH_DIR> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <WORKSPACE_BASEDIR>"
-        cleanupAndExit 0
-      fi
-      ### PATCH_FILE contains the location of the patchfile
-      PATCH_FILE=$2 
-      if [[ ! -e "$PATCH_FILE" ]] ; then
-        echo "Unable to locate the patch file $PATCH_FILE"
-        cleanupAndExit 0
-      fi
-      PATCH_DIR=$3
-      ### Check if $PATCH_DIR exists. If it does not exist, create a new directory
-      if [[ ! -e "$PATCH_DIR" ]] ; then
-	mkdir "$PATCH_DIR"
-	if [[ $? == 0 ]] ; then 
-	  echo "$PATCH_DIR has been created"
-	else
-	  echo "Unable to create $PATCH_DIR"
-	  cleanupAndExit 0
-	fi
-      fi
-      SVN=$4
-      GREP=$5
-      PATCH=$6
-      FINDBUGS_HOME=$7
-      FORREST_HOME=$8
-      BASEDIR=$9
-      ### Obtain the patch filename to append it to the version number
-      defect=`basename $PATCH_FILE` 
+    --patch-dir=*)
+      PATCH_DIR=${i#*=}
+      ;;
+    --support-dir=*)
+      SUPPORT_DIR=${i#*=}
+      ;;
+    --basedir=*)
+      BASEDIR=${i#*=}
+      ;;
+    --mvn-cmd=*)
+      MVN=${i#*=}
+      ;;
+    --ps-cmd=*)
+      PS=${i#*=}
+      ;;
+    --awk-cmd=*)
+      AWK=${i#*=}
+      ;;
+    --wget-cmd=*)
+      WGET=${i#*=}
+      ;;
+    --svn-cmd=*)
+      SVN=${i#*=}
+      ;;
+    --grep-cmd=*)
+      GREP=${i#*=}
+      ;;
+    --patch-cmd=*)
+      PATCH=${i#*=}
+      ;;
+    --jira-cmd=*)
+      JIRACLI=${i#*=}
+      ;;
+    --jira-password=*)
+      JIRA_PASSWD=${i#*=}
+      ;;
+    --findbugs-home=*)
+      FINDBUGS_HOME=${i#*=}
+      ;;
+    --forrest-home=*)
+      FORREST_HOME=${i#*=}
+      ;;
+    --eclipse-home=*)
+      ECLIPSE_HOME=${i#*=}
+      ;;
+    --dirty-workspace)
+      DIRTY_WORKSPACE=true
       ;;
     *)
-      echo "ERROR: usage $0 HUDSON [args] | DEVELOPER [args]"
-      cleanupAndExit 0
+      PATCH_OR_DEFECT=$i
       ;;
-  esac
+    esac
+  done
+  if [ -z "$PATCH_OR_DEFECT" ]; then
+    printUsage
+    exit 1
+  fi
+  if [[ $JENKINS == "true" ]] ; then
+    echo "Running in Jenkins mode"
+    defect=$PATCH_OR_DEFECT
+    ECLIPSE_PROPERTY="-Declipse.home=$ECLIPSE_HOME"
+  else
+    echo "Running in developer mode"
+    JENKINS=false
+    ### PATCH_FILE contains the location of the patchfile
+    PATCH_FILE=$PATCH_OR_DEFECT
+    if [[ ! -e "$PATCH_FILE" ]] ; then
+      echo "Unable to locate the patch file $PATCH_FILE"
+      cleanupAndExit 0
+    fi
+    ### Check if $PATCH_DIR exists. If it does not exist, create a new directory
+    if [[ ! -e "$PATCH_DIR" ]] ; then
+      mkdir "$PATCH_DIR"
+      if [[ $? == 0 ]] ; then 
+        echo "$PATCH_DIR has been created"
+      else
+        echo "Unable to create $PATCH_DIR"
+        cleanupAndExit 0
+      fi
+    fi
+    ### Obtain the patch filename to append it to the version number
+    defect=`basename $PATCH_FILE`
+  fi
 }
 
 ###############################################################################
@@ -111,9 +179,10 @@ checkout () {
   echo ""
   echo ""
   ### When run by a developer, if the workspace contains modifications, do not continue
+  ### unless the --dirty-workspace option was set
   status=`$SVN stat --ignore-externals | sed -e '/^X[ ]*/D'`
-  if [[ $HUDSON == "false" ]] ; then
-    if [[ "$status" != "" ]] ; then
+  if [[ $JENKINS == "false" ]] ; then
+    if [[ "$status" != "" && -z $DIRTY_WORKSPACE ]] ; then
       echo "ERROR: can't run in a workspace that contains the following modifications"
       echo "$status"
       cleanupAndExit 1
@@ -131,7 +200,7 @@ checkout () {
 ###############################################################################
 setup () {
   ### Download latest patch file (ignoring .htm and .html) when run from patch process
-  if [[ $HUDSON == "true" ]] ; then
+  if [[ $JENKINS == "true" ]] ; then
     $WGET -q -O $PATCH_DIR/jira http://issues.apache.org/jira/browse/$defect
     if [[ `$GREP -c 'Patch Available' $PATCH_DIR/jira` == 0 ]] ; then
       echo "$defect is not \"Patch Available\".  Exiting."
@@ -162,6 +231,7 @@ setup () {
       cleanupAndExit 0
     fi
   fi
+  . $BASEDIR/dev-support/test-patch.properties
   ### exit if warnings are NOT defined in the properties file
   if [ -z "$OK_FINDBUGS_WARNINGS" ] || [[ -z "$OK_JAVADOC_WARNINGS" ]] || [[ -z $OK_RELEASEAUDIT_WARNINGS ]]; then
     echo "Please define the following properties in test-patch.properties file"
@@ -179,9 +249,8 @@ setup () {
   echo "======================================================================"
   echo ""
   echo ""
-#  echo "$ANT_HOME/bin/ant  -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
-# $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
-  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
+  echo "$MVN clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
+  $MVN clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     echo "Trunk compilation is broken?"
     cleanupAndExit 1
@@ -229,7 +298,7 @@ checkTests () {
   testReferences=`$GREP -c -i '/test' $PATCH_DIR/patch`
   echo "There appear to be $testReferences test files referenced in the patch."
   if [[ $testReferences == 0 ]] ; then
-    if [[ $HUDSON == "true" ]] ; then
+    if [[ $JENKINS == "true" ]] ; then
       patchIsDoc=`$GREP -c -i 'title="documentation' $PATCH_DIR/jira`
       if [[ $patchIsDoc != 0 ]] ; then
         echo "The patch appears to be a documentation patch that doesn't require tests."
@@ -297,12 +366,15 @@ checkJavadocWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
-  (cd root; mvn install -DskipTests)
-  (cd doclet; mvn install -DskipTests)
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
-  $MAVEN_HOME/bin/mvn clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
-  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | awk '/Javadoc Warnings/,EOF' | $GREP -v 'Javadoc Warnings' | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
+  echo "$MVN clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1"
+  if [ -d hadoop-project ]; then
+    (cd hadoop-project; $MVN install)
+  fi
+  if [ -d hadoop-annotations ]; then  
+    (cd hadoop-annotations; $MVN install)
+  fi
+  $MVN clean compile javadoc:javadoc -DskipTests -Pdocs -D${PROJECT_NAME}PatchProcess > $PATCH_DIR/patchJavadocWarnings.txt 2>&1
+  javadocWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavadocWarnings.txt | $AWK '/Javadoc Warnings/,EOF' | $GREP warning | $AWK 'BEGIN {total = 0} {total += 1} END {print total}'`
   echo ""
   echo ""
   echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
@@ -332,9 +404,8 @@ checkJavacWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= clean tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
-  $MAVEN_HOME/bin/mvn clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+  echo "$MVN clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+  $MVN clean compile -DskipTests -D${PROJECT_NAME}PatchProcess -Ptest-patch > $PATCH_DIR/patchJavacWarnings.txt 2>&1
   if [[ $? != 0 ]] ; then
     JIRA_COMMENT="$JIRA_COMMENT
 
@@ -343,8 +414,8 @@ checkJavacWarnings () {
   fi
   ### Compare trunk and patch javac warning numbers
   if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
-    trunkJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/trunkJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
-    patchJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavacWarnings.txt | awk 'BEGIN {total = 0} {total += 1} END {print total}'`
+    trunkJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/trunkJavacWarnings.txt | $AWK 'BEGIN {total = 0} {total += 1} END {print total}'`
+    patchJavacWarnings=`$GREP '\[WARNING\]' $PATCH_DIR/patchJavacWarnings.txt | $AWK 'BEGIN {total = 0} {total += 1} END {print total}'`
     echo "There appear to be $trunkJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
     if [[ $patchJavacWarnings != "" && $trunkJavacWarnings != "" ]] ; then
       if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
@@ -373,9 +444,8 @@ checkReleaseAuditWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
-  $MAVEN_HOME/bin/mvn apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
+  echo "$MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1"
+  $MVN apache-rat:check -D${PROJECT_NAME}PatchProcess 2>&1
   find . -name rat.txt | xargs cat > $PATCH_DIR/patchReleaseAuditWarnings.txt
 
   ### Compare trunk and patch release audit warning numbers
@@ -418,9 +488,8 @@ checkStyle () {
   echo "THIS IS NOT IMPLEMENTED YET"
   echo ""
   echo ""
-  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
-  $MAVEN_HOME/bin/mvn compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess
+  echo "$MVN compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess"
+  $MVN compile checkstyle:checkstyle -D${PROJECT_NAME}PatchProcess
 
   JIRA_COMMENT_FOOTER="Checkstyle results: $BUILD_URL/artifact/trunk/build/test/checkstyle-errors.html
 $JIRA_COMMENT_FOOTER"
@@ -451,9 +520,8 @@ checkFindbugsWarnings () {
   echo "======================================================================"
   echo ""
   echo ""
-  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=$FINDBUGS_HOME -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
-  $MAVEN_HOME/bin/mvn clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess -X
+  echo "$MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess"
+  $MVN clean compile findbugs:findbugs -D${PROJECT_NAME}PatchProcess
 
   if [ $? != 0 ] ; then
     JIRA_COMMENT="$JIRA_COMMENT
@@ -461,18 +529,29 @@ checkFindbugsWarnings () {
     -1 findbugs.  The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
     return 1
   fi
-JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings.html
+    
+  findbugsWarnings=0
+  for file in $(find $BASEDIR -name findbugsXml.xml)
+  do
+    relative_file=${file#$BASEDIR/} # strip leading $BASEDIR prefix
+    if [ ! $relative_file == "target/findbugsXml.xml" ]; then
+      module_suffix=${relative_file%/target/findbugsXml.xml} # strip trailing path
+    fi
+    
+    cp $file $PATCH_DIR/patchFindbugsWarnings${module_suffix}.xml
+    $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
+      $PATCH_DIR/patchFindbugsWarnings${module_suffix}.xml \
+      $PATCH_DIR/patchFindbugsWarnings${module_suffix}.xml
+    newFindbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings${module_suffix}.xml \
+      $PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.xml | $AWK '{print $1}'`
+    echo "Found $newFindbugsWarnings Findbugs warnings ($file)"
+    findbugsWarnings=$((findbugsWarnings+newFindbugsWarnings))
+    $FINDBUGS_HOME/bin/convertXmlToText -html \
+      $PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.xml \
+      $PATCH_DIR/newPatchFindbugsWarnings${module_suffix}.html
+    JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/target/newPatchFindbugsWarnings${module_suffix}.html
 $JIRA_COMMENT_FOOTER"
-  
-  cp $BASEDIR/hadoop-common/target/findbugsXml.xml $PATCH_DIR/patchFindbugsWarnings.xml
-  $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
-    $PATCH_DIR/patchFindbugsWarnings.xml \
-    $PATCH_DIR/patchFindbugsWarnings.xml
-  findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
-    $PATCH_DIR/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
-  $FINDBUGS_HOME/bin/convertXmlToText -html \
-    $PATCH_DIR/newPatchFindbugsWarnings.xml \
-    $PATCH_DIR/newPatchFindbugsWarnings.html
+  done
 
   ### if current warnings greater than OK_FINDBUGS_WARNINGS
   if [[ $findbugsWarnings > $OK_FINDBUGS_WARNINGS ]] ; then
@@ -501,15 +580,14 @@ runCoreTests () {
   echo ""
   
   ### Kill any rogue build processes from the last attempt
-  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
+  $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
   PreTestTarget=""
   if [[ $defect == MAPREDUCE-* ]] ; then
      PreTestTarget="create-c++-configure"
   fi
 
-  #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME $PreTestTarget test-core"
-  #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME  $PreTestTarget test-core
-  $MAVEN_HOME/bin/mvn clean test -Pnative -DHadoopPatchProcess
+  echo "$MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess"
+  $MVN clean test -Pnative -D${PROJECT_NAME}PatchProcess
   if [[ $? != 0 ]] ; then
     ### Find and format names of failed tests
     failed_tests=`grep -l -E "<failure|<error" $WORKSPACE/trunk/target/hadoop-common/surefire-reports/*.xml | sed -e "s|.*target/surefire-reports/TEST-|                  |g" | sed -e "s|\.xml||g"`
@@ -544,7 +622,7 @@ runContribTests () {
   fi
 
   ### Kill any rogue build processes from the last attempt
-  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
+  $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
   #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib"
   #$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no test-contrib
@@ -575,7 +653,7 @@ checkInjectSystemFaults () {
   echo ""
   
   ### Kill any rogue build processes from the last attempt
-  $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
+  $PS auxwww | $GREP ${PROJECT_NAME}PatchProcess | $AWK '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
 
   #echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults"
   #$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=no -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME inject-system-faults
@@ -597,7 +675,7 @@ checkInjectSystemFaults () {
 submitJiraComment () {
   local result=$1
   ### Do not output the value of JIRA_COMMENT_FOOTER when run by a developer
-  if [[  $HUDSON == "false" ]] ; then
+  if [[  $JENKINS == "false" ]] ; then
     JIRA_COMMENT_FOOTER=""
   fi
   if [[ $result == 0 ]] ; then
@@ -616,7 +694,7 @@ $JIRA_COMMENT_FOOTER"
 
 $comment"  
 
-  if [[ $HUDSON == "true" ]] ; then
+  if [[ $JENKINS == "true" ]] ; then
     echo ""
     echo ""
     echo "======================================================================"
@@ -637,7 +715,7 @@ $comment"  
 ### Cleanup files
 cleanupAndExit () {
   local result=$1
-  if [[ $HUDSON == "true" ]] ; then
+  if [[ $JENKINS == "true" ]] ; then
     if [ -e "$PATCH_DIR" ] ; then
       mv $PATCH_DIR $BASEDIR
     fi
@@ -669,7 +747,7 @@ cd $BASEDIR
 
 checkout
 RESULT=$?
-if [[ $HUDSON == "true" ]] ; then
+if [[ $JENKINS == "true" ]] ; then
   if [[ $RESULT != 0 ]] ; then
     exit 100
   fi
@@ -678,7 +756,7 @@ setup
 checkAuthor
 RESULT=$?
 
-if [[ $HUDSON == "true" ]] ; then
+if [[ $JENKINS == "true" ]] ; then
   cleanUpXml
 fi
 checkTests
@@ -700,7 +778,7 @@ checkFindbugsWarnings
 checkReleaseAuditWarnings
 (( RESULT = RESULT + $? ))
 ### Do not call these when run by a developer 
-if [[ $HUDSON == "true" ]] ; then
+if [[ $JENKINS == "true" ]] ; then
   runCoreTests
   (( RESULT = RESULT + $? ))
   runContribTests

Propchange: hadoop/common/branches/HDFS-1623/hadoop-annotations/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Aug 16 00:37:15 2011
@@ -0,0 +1,5 @@
+.classpath
+.git
+.project
+.settings
+target

Propchange: hadoop/common/branches/HDFS-1623/hadoop-assemblies/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Aug 16 00:37:15 2011
@@ -0,0 +1,5 @@
+.classpath
+.git
+.project
+.settings
+target

Modified: hadoop/common/branches/HDFS-1623/hadoop-assemblies/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-assemblies/pom.xml?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-assemblies/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-assemblies/pom.xml Tue Aug 16 00:37:15 2011
@@ -67,9 +67,6 @@
             <requireJavaVersion>
               <version>1.6</version>
             </requireJavaVersion>
-            <requireOS>
-              <family>unix</family>
-            </requireOS>
           </rules>
         </configuration>
         <executions>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/
------------------------------------------------------------------------------
--- svn:ignore (original)
+++ svn:ignore Tue Aug 16 00:37:15 2011
@@ -1,4 +1,5 @@
 .classpath
+.git
 .project
-build
-build-fi
+.settings
+target

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt Tue Aug 16 00:37:15 2011
@@ -309,11 +309,29 @@ Trunk (unreleased changes)
     HADOOP-7501. Publish Hadoop Common artifacts (post HADOOP-6671) to Apache
     SNAPSHOTs repo. (Alejandro Abdelnur via tomwhite)
 
+    HADOOP-7525. Make arguments to test-patch optional. (tomwhite)
+
+    HADOOP-7472. RPC client should deal with IP address change.
+    (Kihwal Lee via suresh)
+  
+    HADOOP-7499. Add method for doing a sanity check on hostnames in NetUtils.
+    (Jeffrey Naisbit via mahadev)
+
+    HADOOP-6158. Move CyclicIteration to HDFS. (eli)
+
+    HADOOP-7526. Add TestPath tests for URI conversion and reserved
+    characters. (eli)
+
+    HADOOP-7531. Add servlet util methods for handling paths in requests. (eli)
+
   OPTIMIZATIONS
   
     HADOOP-7333. Performance improvement in PureJavaCrc32. (Eric Caspole
     via todd)
 
+    HADOOP-7445. Implement bulk checksum verification using efficient native
+    code. (todd)
+
   BUG FIXES
 
     HADOOP-7327. FileSystem.listStatus() throws NullPointerException instead of
@@ -451,6 +469,34 @@ Trunk (unreleased changes)
 
     HADOOP-7508. Compiled nativelib is in wrong directory and it is not picked
     up by surefire setup. (Alejandro Abdelnur via tomwhite)
+   
+    HADOOP-7520. Fix to add distribution management info to hadoop-main
+    (Alejandro Abdelnur via gkesavan)
+
+    HADOOP-7515. test-patch reports the wrong number of javadoc warnings.
+    (tomwhite)
+
+    HADOOP-7523. Test org.apache.hadoop.fs.TestFilterFileSystem fails due to
+    java.lang.NoSuchMethodException. (John Lee via tomwhite)
+
+    HADOOP-7528. Maven build fails in Windows. (Alejandro Abdelnur via
+    tomwhite)
+
+    HADOOP-7533. Allow test-patch to be run from any subproject directory.
+    (tomwhite)
+
+    HADOOP-7512. Fix example mistake in WritableComparable javadocs.
+    (Harsh J via eli)
+
+    HADOOP-7357. hadoop.io.compress.TestCodec#main() should exit with
+    non-zero exit code if test failed. (Philip Zeyliger via eli)
+
+    HADOOP-6622. Token should not print the password in toString. (eli)
+
+    HADOOP-7529. Fix lock cycles in metrics system. (llu)
+
+    HADOOP-7545. Common -tests JAR should not include properties and configs.
+    (todd)
 
 Release 0.22.0 - Unreleased
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/CHANGES.txt
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,4 +1,5 @@
 /hadoop/common/branches/yahoo-merge/CHANGES.txt:1079157,1079163-1079164,1079167
+/hadoop/common/trunk/hadoop-common/CHANGES.txt:1153185-1158071
 /hadoop/core/branches/branch-0.18/CHANGES.txt:727226
 /hadoop/core/branches/branch-0.19/CHANGES.txt:713112
 /hadoop/core/trunk/CHANGES.txt:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/LICENSE.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/LICENSE.txt?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/LICENSE.txt (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/LICENSE.txt Tue Aug 16 00:37:15 2011
@@ -242,3 +242,12 @@ For the org.apache.hadoop.util.bloom.* c
  * ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE 
  * POSSIBILITY OF SUCH DAMAGE.
  */
+
+For portions of the native implementation of slicing-by-8 CRC calculation
+in src/main/native/src/org/apache/hadoop/util:
+
+/**
+ *   Copyright 2008,2009,2010 Massachusetts Institute of Technology.
+ *   All rights reserved. Use of this source code is governed by a
+ *   BSD-style license that can be found in the LICENSE file.
+ */

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/pom.xml Tue Aug 16 00:37:15 2011
@@ -279,11 +279,23 @@
         <artifactId>maven-jar-plugin</artifactId>
         <executions>
           <execution>
+            <id>prepare-jar</id>
             <phase>prepare-package</phase>
             <goals>
               <goal>jar</goal>
+            </goals>
+          </execution>
+          <execution>
+            <id>prepare-test-jar</id>
+            <phase>prepare-package</phase>
+            <goals>
               <goal>test-jar</goal>
             </goals>
+            <configuration>
+              <includes>
+                <include>**/*.class</include>
+              </includes>
+            </configuration>
           </execution>
         </executions>
       </plugin>
@@ -545,6 +557,7 @@
                     <javahClassName>org.apache.hadoop.security.JniBasedUnixGroupsNetgroupMapping</javahClassName>
                     <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyCompressor</javahClassName>
                     <javahClassName>org.apache.hadoop.io.compress.snappy.SnappyDecompressor</javahClassName>
+                    <javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
                   </javahClassNames>
                   <javahOutputDirectory>${project.build.directory}/native/javah</javahOutputDirectory>
                 </configuration>
@@ -852,11 +865,17 @@
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/tar-copynativelibs.sh">
 
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                        BUILD_DIR="${project.build.directory}"
+                      else
+                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
                       TAR='tar cf -'
                       UNTAR='tar xfBp -'
-                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
                       if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
+                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}/lib/native/${build.platform}"
                       mkdir -p $${TARGET_DIR}
                       cd $${LIB_DIR}
                       $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
@@ -880,11 +899,20 @@
                 </goals>
                 <configuration>
                   <target>
-                    <!-- Using Unix tar to preserve symlinks -->
-                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
-                      <arg value="czf"/>
-                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}.tar.gz"/>
-                      <arg value="${project.artifactId}-${project.version}"/>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/tar-maketar.sh">
+
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                        BUILD_DIR="${project.build.directory}"
+                      else
+                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
+                      cd ${BUILD_DIR}
+                      tar czf ${project.artifactId}-${project.version}.tar.gz ${project.artifactId}-${project.version}
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./tar-maketar.sh"/>
                     </exec>
                   </target>
                 </configuration>
@@ -945,11 +973,17 @@
                     <!-- Using Unix script to preserve symlinks -->
                     <echo file="${project.build.directory}/bintar-copynativelibs.sh">
 
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                        BUILD_DIR="${project.build.directory}"
+                      else
+                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
                       TAR='tar cf -'
                       UNTAR='tar xfBp -'
-                      LIB_DIR="${project.build.directory}/native/target/usr/local/lib"
+                      LIB_DIR="${BUILD_DIR}/native/target/usr/local/lib"
                       if [ -d $${LIB_DIR} ] ; then
-                      TARGET_DIR="${project.build.directory}/${project.artifactId}-${project.version}-bin/lib"
+                      TARGET_DIR="${BUILD_DIR}/${project.artifactId}-${project.version}-bin/lib"
                       mkdir -p $${TARGET_DIR}
                       cd $${LIB_DIR}
                       $$TAR *hadoop* | (cd $${TARGET_DIR}/; $$UNTAR)
@@ -973,11 +1007,20 @@
                 </goals>
                 <configuration>
                   <target>
-                    <!-- Using Unix tar to preserve symlinks -->
-                    <exec executable="tar" dir="${project.build.directory}" failonerror="yes">
-                      <arg value="czf"/>
-                      <arg value="${project.build.directory}/${project.artifactId}-${project.version}-bin.tar.gz"/>
-                      <arg value="${project.artifactId}-${project.version}-bin"/>
+                    <!-- Using Unix script to preserve symlinks -->
+                    <echo file="${project.build.directory}/bintar-maketar.sh">
+
+                      which cygpath 2> /dev/null
+                      if [ $? = 1 ]; then
+                        BUILD_DIR="${project.build.directory}"
+                      else
+                        BUILD_DIR=`cygpath --unix '${project.build.directory}'`
+                      fi
+                      cd ${BUILD_DIR}
+                      tar czf ${project.artifactId}-${project.version}-bin.tar.gz ${project.artifactId}-${project.version}-bin
+                    </echo>
+                    <exec executable="sh" dir="${project.build.directory}" failonerror="true">
+                      <arg line="./bintar-maketar.sh"/>
                     </exec>
                   </target>
                 </configuration>

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/docs/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,2 +1,2 @@
-/hadoop/common/trunk/hadoop-common/src/main/docs:1152502-1153927
+/hadoop/common/trunk/hadoop-common/src/main/docs:1152502-1158071
 /hadoop/core/branches/branch-0.19/src/docs:713112

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common/src/main/java:1152502-1153927
+/hadoop/common/trunk/hadoop-common/src/main/java:1152502-1158071
 /hadoop/core/branches/branch-0.19/core/src/java:713112
 /hadoop/core/trunk/src/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparable.java Tue Aug 16 00:37:15 2011
@@ -52,7 +52,7 @@ import org.apache.hadoop.classification.
  *         timestamp = in.readLong();
  *       }
  *       
- *       public int compareTo(MyWritableComparable w) {
+ *       public int compareTo(MyWritableComparable o) {
  *         int thisValue = this.value;
  *         int thatValue = ((IntWritable)o).value;
  *         return (thisValue &lt; thatValue ? -1 : (thisValue==thatValue ? 0 : 1));

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java Tue Aug 16 00:37:15 2011
@@ -405,6 +405,27 @@ public class Client {
       saslRpcClient = new SaslRpcClient(authMethod, token, serverPrincipal);
       return saslRpcClient.saslConnect(in2, out2);
     }
+
+    /**
+     * Update the server address if the address corresponding to the host
+     * name has changed.
+     *
+     * @return true if an addr change was detected.
+     * @throws IOException when the hostname cannot be resolved.
+     */
+    private synchronized boolean updateAddress() throws IOException {
+      // Do a fresh lookup with the old host name.
+      InetSocketAddress currentAddr =  new InetSocketAddress(
+                               server.getHostName(), server.getPort());
+
+      if (!server.equals(currentAddr)) {
+        LOG.warn("Address change detected. Old: " + server.toString() +
+                                 " New: " + currentAddr.toString());
+        server = currentAddr;
+        return true;
+      }
+      return false;
+    }
     
     private synchronized void setupConnection() throws IOException {
       short ioFailures = 0;
@@ -435,19 +456,28 @@ public class Client {
           }
           
           // connection time out is 20s
-          NetUtils.connect(this.socket, remoteId.getAddress(), 20000);
+          NetUtils.connect(this.socket, server, 20000);
           if (rpcTimeout > 0) {
             pingInterval = rpcTimeout;  // rpcTimeout overwrites pingInterval
           }
           this.socket.setSoTimeout(pingInterval);
           return;
         } catch (SocketTimeoutException toe) {
+          /* Check for an address change and update the local reference.
+           * Reset the failure counter if the address was changed
+           */
+          if (updateAddress()) {
+            timeoutFailures = ioFailures = 0;
+          }
           /*
            * The max number of retries is 45, which amounts to 20s*45 = 15
            * minutes retries.
            */
           handleConnectionFailure(timeoutFailures++, 45, toe);
         } catch (IOException ie) {
+          if (updateAddress()) {
+            timeoutFailures = ioFailures = 0;
+          }
           handleConnectionFailure(ioFailures++, maxRetries, ie);
         }
       }
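
The hunk above (HADOOP-7472) makes the client re-resolve the server's hostname whenever a
connect attempt fails, and reset the failure counters if the IP behind the name changed.
A minimal standalone sketch of the same re-resolution check, in plain Java with
illustrative names (not the actual Client internals):

    import java.net.InetSocketAddress;

    class AddressTracker {
      private InetSocketAddress server;

      AddressTracker(String host, int port) {
        server = new InetSocketAddress(host, port); // initial DNS lookup
      }

      /** Re-resolve the host name; true if the underlying address changed. */
      synchronized boolean updateAddress() {
        // Constructing a new InetSocketAddress performs a fresh lookup.
        InetSocketAddress current =
            new InetSocketAddress(server.getHostName(), server.getPort());
        if (current.isUnresolved() || server.equals(current)) {
          return false; // unresolved or unchanged: keep the old address
        }
        server = current;
        return true;
      }
    }

A caller would reset its retry counters whenever updateAddress() returns true, exactly as
setupConnection() does above.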

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/DefaultMetricsSystem.java Tue Aug 16 00:37:15 2011
@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.metrics2.lib;
 
+import java.util.concurrent.atomic.AtomicReference;
 import javax.management.ObjectName;
 
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -34,7 +35,8 @@ import org.apache.hadoop.metrics2.impl.M
 public enum DefaultMetricsSystem {
   INSTANCE; // the singleton
 
-  private MetricsSystem impl = new MetricsSystemImpl();
+  private AtomicReference<MetricsSystem> impl =
+      new AtomicReference<MetricsSystem>(new MetricsSystemImpl());
   volatile boolean miniClusterMode = false;
   final UniqueNames mBeanNames = new UniqueNames();
   final UniqueNames sourceNames = new UniqueNames();
@@ -48,8 +50,8 @@ public enum DefaultMetricsSystem {
     return INSTANCE.init(prefix);
   }
 
-  synchronized MetricsSystem init(String prefix) {
-    return impl.init(prefix);
+  MetricsSystem init(String prefix) {
+    return impl.get().init(prefix);
   }
 
   /**
@@ -66,8 +68,9 @@ public enum DefaultMetricsSystem {
     INSTANCE.shutdownInstance();
   }
 
-  synchronized void shutdownInstance() {
-    if (impl.shutdown()) {
+  void shutdownInstance() {
+    boolean last = impl.get().shutdown();
+    if (last) synchronized(this) {
       mBeanNames.map.clear();
       sourceNames.map.clear();
     }
@@ -78,13 +81,11 @@ public enum DefaultMetricsSystem {
     return INSTANCE.setImpl(ms);
   }
 
-  synchronized MetricsSystem setImpl(MetricsSystem ms) {
-    MetricsSystem old = impl;
-    impl = ms;
-    return old;
+  MetricsSystem setImpl(MetricsSystem ms) {
+    return impl.getAndSet(ms);
   }
 
-  synchronized MetricsSystem getImpl() { return impl; }
+  MetricsSystem getImpl() { return impl.get(); }
 
   @InterfaceAudience.Private
   public static void setMiniClusterMode(boolean choice) {
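
The change above (HADOOP-7529) trades the synchronized accessors for an AtomicReference,
so reads and swaps of the metrics system no longer take the enum's monitor, breaking the
reported lock cycle. A hedged sketch of the idiom with a generic holder (illustrative
type, not the actual MetricsSystem API):

    import java.util.concurrent.atomic.AtomicReference;

    class Holder<T> {
      private final AtomicReference<T> impl;

      Holder(T initial) {
        impl = new AtomicReference<T>(initial);
      }

      T get() { return impl.get(); }                   // lock-free read
      T swap(T next) { return impl.getAndSet(next); }  // atomic replace, returns the old value
    }

Note the diff still synchronizes the map-clearing in shutdownInstance(), since those
clears touch shared UniqueNames state that the AtomicReference does not protect.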

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/CachedDNSToSwitchMapping.java Tue Aug 16 00:37:15 2011
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.net;
 
-import java.net.InetAddress;
-import java.net.SocketException;
-import java.net.UnknownHostException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.Map;
@@ -91,32 +88,6 @@ public class CachedDNSToSwitchMapping im
     return result;
   }
 
-  /**
-   * Resolves host names and adds them to the cache.
-   * Unlike the 'resolve" method, this won't hide UnknownHostExceptions
-   * 
-   * @param names to resolve
-   * @return List of resolved names
-   * @throws UnknownHostException if any hosts cannot be resolved
-   */  
-  public List<String> resolveValidHosts(List<String> names) 
-    throws UnknownHostException {
-    if (names.isEmpty()) {
-      return new ArrayList<String>();
-    }
-    List<String> addresses = new ArrayList<String>(names.size());
-    for (String name : names) {
-      addresses.add(InetAddress.getByName(name).getHostAddress());
-    }
-
-    List<String> uncachedHosts = this.getUncachedHosts(names);
-
-    // Resolve the uncached hosts
-    List<String> resolvedHosts = rawMapping.resolveValidHosts(uncachedHosts);
-    this.cacheResolvedHosts(uncachedHosts, resolvedHosts);
-    return this.getCachedHosts(addresses);
-  }
-
   public List<String> resolve(List<String> names) {
     // normalize all input names to be in the form of IP addresses
     names = NetUtils.normalizeHostNames(names);

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/DNSToSwitchMapping.java Tue Aug 16 00:37:15 2011
@@ -18,7 +18,6 @@
 package org.apache.hadoop.net;
 
 import java.util.List;
-import java.net.UnknownHostException;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -45,23 +44,4 @@ public interface DNSToSwitchMapping {
    * @return list of resolved network paths
    */
   public List<String> resolve(List<String> names);
-
-  /**
-   * Resolves a list of DNS-names/IP-addresses and returns back a list of
-   * switch information (network paths). One-to-one correspondence must be 
-   * maintained between the elements in the lists. 
-   * Consider an element in the argument list - x.y.com. The switch information
-   * that is returned must be a network path of the form /foo/rack, 
-   * where / is the root, and 'foo' is the switch where 'rack' is connected.
-   * Note the hostname/ip-address is not part of the returned path.
-   * The network topology of the cluster would determine the number of
-   * components in the network path.  Unlike 'resolve', names must be 
-   * resolvable
-   * @param names
-   * @return list of resolved network paths
-   * @throws UnknownHostException if any hosts are not resolvable
-   */
-  public List<String> resolveValidHosts(List<String> names)
-    throws UnknownHostException;
-
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/NetUtils.java Tue Aug 16 00:37:15 2011
@@ -27,6 +27,7 @@ import java.net.Socket;
 import java.net.SocketAddress;
 import java.net.SocketException;
 import java.net.URI;
+import java.net.URISyntaxException;
 import java.net.UnknownHostException;
 import java.net.ConnectException;
 import java.nio.channels.SocketChannel;
@@ -428,6 +429,35 @@ public class NetUtils {
     return hostNames;
   }
 
+  /**
+   * Performs a sanity check on the list of hostnames/IPs to verify they at least
+   * appear to be valid.
+   * @param names - List of hostnames/IPs
+   * @throws UnknownHostException
+   */
+  public static void verifyHostnames(String[] names) throws UnknownHostException {
+    for (String name: names) {
+      if (name == null) {
+        throw new UnknownHostException("null hostname found");
+      }
+      // The first check supports URL formats (e.g. hdfs://, etc.). 
+      // java.net.URI requires a scheme, so we add a dummy one if it doesn't
+      // have one already.
+      URI uri = null;
+      try {
+        uri = new URI(name);
+        if (uri.getHost() == null) {
+          uri = new URI("http://" + name);
+        }
+      } catch (URISyntaxException e) {
+        uri = null;
+      }
+      if (uri == null || uri.getHost() == null) {
+        throw new UnknownHostException(name + " is not a valid Inet address");
+      }
+    }
+  }
+
   private static final Pattern ipPattern = // Pattern for matching hostname to ip:port
     Pattern.compile("\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}\\.\\d{1,3}:?\\d*");
   

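A short usage sketch for the new NetUtils.verifyHostnames helper; the host names are made up. It accepts anything that parses as a host, including URL-style names, and throws UnknownHostException on the first name that does not:

    import java.net.UnknownHostException;

    import org.apache.hadoop.net.NetUtils;

    public class VerifyHostnamesExample {
      public static void main(String[] args) {
        try {
          // All of these parse as hosts, so no exception is thrown.
          NetUtils.verifyHostnames(new String[] {
              "datanode1.example.com", "10.0.0.1", "hdfs://nn.example.com"});
          // A name with spaces is neither a URI nor a host, so this throws.
          NetUtils.verifyHostnames(new String[] {"invalid host here"});
        } catch (UnknownHostException e) {
          System.err.println("rejected: " + e.getMessage());
        }
      }
    }

Note this is only a syntactic sanity check; no DNS lookup is performed.
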
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/net/ScriptBasedMapping.java Tue Aug 16 00:37:15 2011
@@ -20,7 +20,6 @@ package org.apache.hadoop.net;
 
 import java.util.*;
 import java.io.*;
-import java.net.UnknownHostException;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
@@ -123,17 +122,6 @@ implements Configurable
     
     return m;
   }
-  
-  public List<String> resolveValidHosts(List<String> names) 
-    throws UnknownHostException {
-    List<String> result = this.resolve(names);
-    if (result != null) {
-      return result;
-    } else {
-      throw new UnknownHostException(
-          "Unknown host(s) returned from ScriptBasedMapping");
-    }
-  }
 
   private String runResolveCommand(List<String> args) {
     int loopCount = 0;

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/security/token/Token.java Tue Aug 16 00:37:15 2011
@@ -238,8 +238,6 @@ public class Token<T extends TokenIdenti
     StringBuilder buffer = new StringBuilder();
     buffer.append("Ident: ");
     addBinaryBuffer(buffer, identifier);
-    buffer.append(", Pass: ");
-    addBinaryBuffer(buffer, password);
     buffer.append(", Kind: ");
     buffer.append(kind.toString());
     buffer.append(", Service: ");

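The Token change above keeps the password out of toString, so delegation tokens no longer leak their secret into logs. A small sketch of the effect, assuming the byte-array constructor; the bytes, kind, and service below are made up:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    public class TokenLoggingExample {
      public static void main(String[] args) {
        Token<TokenIdentifier> token = new Token<TokenIdentifier>(
            new byte[] {1, 2, 3},    // identifier
            new byte[] {42, 42, 42}, // password (the secret)
            new Text("EXAMPLE_KIND"),
            new Text("127.0.0.1:8020"));
        // Prints Ident, Kind and Service; the password bytes are no longer
        // rendered, so the token can be logged safely.
        System.out.println(token);
      }
    }
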
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/DataChecksum.java Tue Aug 16 00:37:15 2011
@@ -265,6 +265,11 @@ public class DataChecksum implements Che
           fileName, basePos);
       return;
     }
+    if (NativeCrc32.isAvailable()) {
+      NativeCrc32.verifyChunkedSums(bytesPerChecksum, type, checksums, data,
+          fileName, basePos);
+      return;
+    }
     
     int startDataPos = data.position();
     data.mark();

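The DataChecksum hunk gates verification on NativeCrc32.isAvailable(): when libhadoop is loaded, the work is handed off to the bulk native CRC code added by this commit (bulk_crc32.c), and the pure-Java loop below the inserted lines is skipped. The same guard-and-fallback shape in isolation; every name here other than java.util.zip.CRC32 is a hypothetical stand-in:

    import java.util.zip.CRC32;

    public class GuardedFallbackSketch {
      // Stand-in for NativeCrc32.isAvailable(); in Hadoop this would consult
      // NativeCodeLoader.isNativeCodeLoaded().
      static boolean nativeAvailable() {
        return Boolean.getBoolean("sketch.use.native");
      }

      static long checksum(byte[] data) {
        if (nativeAvailable()) {
          return nativeChecksum(data); // fast path: would be a JNI call
        }
        CRC32 crc = new CRC32();       // slow path: portable pure-Java fallback
        crc.update(data, 0, data.length);
        return crc.getValue();
      }

      static long nativeChecksum(byte[] data) {
        throw new UnsupportedOperationException("JNI stub for illustration");
      }

      public static void main(String[] args) {
        System.out.println(checksum(new byte[] {1, 2, 3}));
      }
    }
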
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/java/org/apache/hadoop/util/ServletUtil.java Tue Aug 16 00:37:15 2011
@@ -21,10 +21,15 @@ import java.io.*;
 import java.util.Calendar;
 
 import javax.servlet.*;
+import javax.servlet.http.HttpServletRequest;
 
+import org.apache.commons.httpclient.URIException;
+import org.apache.commons.httpclient.util.URIUtil;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
+import com.google.common.base.Preconditions;
+
 @InterfaceAudience.Private
 @InterfaceStability.Unstable
 public class ServletUtil {
@@ -107,4 +112,55 @@ public class ServletUtil {
   public static String percentageGraph(float perc, int width) throws IOException {
     return percentageGraph((int)perc, width);
   }
-}
+
+  /**
+   * Escape and encode a string regarded as within the query component of a URI.
+   * @param value the value to encode
+   * @return the encoded query value (UTF-8 is always supported, so this cannot fail)
+   */
+  public static String encodeQueryValue(final String value) {
+    try {
+      return URIUtil.encodeWithinQuery(value, "UTF-8");
+    } catch (URIException e) {
+      throw new AssertionError("JVM does not support UTF-8"); // should never happen!
+    }
+  }
+
+  /**
+   * Escape and encode a string regarded as the path component of a URI.
+   * @param path the path component to encode
+   * @return the encoded path (UTF-8 is always supported, so this cannot fail)
+   */
+  public static String encodePath(final String path) {
+    try {
+      return URIUtil.encodePath(path, "UTF-8");
+    } catch (URIException e) {
+      throw new AssertionError("JVM does not support UTF-8"); // should never happen!
+    }
+  }
+
+  /**
+   * Parse and decode the path component from the given request.
+   * @param request HTTP request to parse
+   * @param servletName the name of the servlet that precedes the path
+   * @return the decoded path component
+   */
+  public static String getDecodedPath(final HttpServletRequest request, String servletName) {
+    try {
+      return URIUtil.decode(getRawPath(request, servletName), "UTF-8");
+    } catch (URIException e) {
+      throw new AssertionError("JVM does not support UTF-8"); // should never happen!
+    }
+  }
+
+  /**
+   * Parse the path component from the given request and return it without decoding.
+   * @param request HTTP request to parse
+   * @param servletName the name of the servlet that precedes the path
+   * @return the raw, undecoded path component
+   */
+  public static String getRawPath(final HttpServletRequest request, String servletName) {
+    Preconditions.checkArgument(request.getRequestURI().startsWith(servletName+"/"));
+    return request.getRequestURI().substring(servletName.length());
+  }
+}
\ No newline at end of file

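A usage sketch for the new ServletUtil helpers; the paths and servlet name are made up. encodePath and encodeQueryValue escape via commons-httpclient's URIUtil, while getRawPath/getDecodedPath split a request URI after the servlet prefix:

    import org.apache.hadoop.util.ServletUtil;

    public class ServletUtilExample {
      public static void main(String[] args) {
        // Path escaping: the space becomes %20.
        System.out.println(ServletUtil.encodePath("/user/foo bar"));
        // Query-component escaping (exact output follows URIUtil's rules).
        System.out.println(ServletUtil.encodeQueryValue("a b&c"));
        // getRawPath/getDecodedPath need a live HttpServletRequest, e.g. in
        // a servlet mapped at "/data":
        //   String p = ServletUtil.getDecodedPath(request, "/data");
      }
    }
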
Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/Makefile.am?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/Makefile.am (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/main/native/Makefile.am Tue Aug 16 00:37:15 2011
@@ -51,7 +51,9 @@ libhadoop_la_SOURCES = src/org/apache/ha
                        src/org/apache/hadoop/security/JniBasedUnixGroupsNetgroupMapping.c \
                        src/org/apache/hadoop/io/nativeio/file_descriptor.c \
                        src/org/apache/hadoop/io/nativeio/errno_enum.c \
-                       src/org/apache/hadoop/io/nativeio/NativeIO.c
+                       src/org/apache/hadoop/io/nativeio/NativeIO.c \
+                       src/org/apache/hadoop/util/NativeCrc32.c \
+                       src/org/apache/hadoop/util/bulk_crc32.c
 
 libhadoop_la_LDFLAGS = -version-info 1:0:0 $(AM_LDFLAGS)
 libhadoop_la_LIBADD = -ldl -ljvm

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/core/
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,3 +1,3 @@
-/hadoop/common/trunk/hadoop-common/src/test/core:1152502-1153927
+/hadoop/common/trunk/hadoop-common/src/test/core:1152502-1158071
 /hadoop/core/branches/branch-0.19/core/src/test/core:713112
 /hadoop/core/trunk/src/test/core:776175-785643,785929-786278

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFilterFileSystem.java Tue Aug 16 00:37:15 2011
@@ -126,6 +126,8 @@ public class TestFilterFileSystem extend
     public void moveFromLocalFile(Path[] srcs, Path dst) { }
     public void moveFromLocalFile(Path src, Path dst) { }
     public void copyToLocalFile(Path src, Path dst) { }
+    public void copyToLocalFile(boolean delSrc, Path src, Path dst, 
+            boolean useRawLocalFileSystem) { }
     public void moveToLocalFile(Path src, Path dst) { }
     public long getBlockSize(Path f) { return 0; }
     public FSDataOutputStream primitiveCreate(final Path f,

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/fs/TestPath.java Tue Aug 16 00:37:15 2011
@@ -185,6 +185,48 @@ public class TestPath extends TestCase {
     assertEquals("foo://bar/fud#boo", new Path(new Path(new URI(
         "foo://bar/baz#bud")), new Path(new URI("/fud#boo"))).toString());
   }
+
+  /** Test URIs created from Path objects */
+  public void testPathToUriConversion() throws URISyntaxException, IOException {
+    // Path differs from URI in that it ignores the query part.
+    assertEquals(new URI(null, null, "/foo?bar", null, null),  new Path("/foo?bar").toUri());
+    assertEquals(new URI(null, null, "/foo\"bar", null, null), new Path("/foo\"bar").toUri());
+    assertEquals(new URI(null, null, "/foo bar", null, null),  new Path("/foo bar").toUri());
+    // Therefore "foo?bar" is a valid Path, so a URI created from a Path has
+    // path "foo?bar", whereas in a plain URI the path part is just "foo".
+    assertEquals("/foo?bar", new Path("http://localhost/foo?bar").toUri().getPath());
+    assertEquals("/foo",     new URI("http://localhost/foo?bar").getPath());
+
+    // The path part handling in Path is equivalent to URI
+    assertEquals(new URI("/foo;bar").getPath(), new Path("/foo;bar").toUri().getPath());
+    assertEquals(new URI("/foo;bar"), new Path("/foo;bar").toUri());
+    assertEquals(new URI("/foo+bar"), new Path("/foo+bar").toUri());
+    assertEquals(new URI("/foo-bar"), new Path("/foo-bar").toUri());
+    assertEquals(new URI("/foo=bar"), new Path("/foo=bar").toUri());
+    assertEquals(new URI("/foo,bar"), new Path("/foo,bar").toUri());
+  }
+
+  /** Test reserved characters in URIs (and therefore Paths) */
+  public void testReservedCharacters() throws URISyntaxException, IOException {
+    // URI encodes the path
+    assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).getRawPath());
+    // URI#getPath decodes the path
+    assertEquals("/foo bar",   new URI(null, null, "/foo bar", null, null).getPath());
+    // URI#toString returns an encoded path
+    assertEquals("/foo%20bar", new URI(null, null, "/foo bar", null, null).toString());
+    assertEquals("/foo%20bar", new Path("/foo bar").toUri().toString());
+    // Reserved chars are not encoded
+    assertEquals("/foo;bar",   new URI("/foo;bar").getPath());
+    assertEquals("/foo;bar",   new URI("/foo;bar").getRawPath());
+    assertEquals("/foo+bar",   new URI("/foo+bar").getPath());
+    assertEquals("/foo+bar",   new URI("/foo+bar").getRawPath());
+
+    // URI#getPath decodes the path part (and URL#getPath does not decode)
+    assertEquals("/foo bar",   new Path("http://localhost/foo bar").toUri().getPath());
+    assertEquals("/foo%20bar", new Path("http://localhost/foo bar").toUri().toURL().getPath());
+    assertEquals("/foo?bar",   new URI("http", "localhost", "/foo?bar", null, null).getPath());
+    assertEquals("/foo%3Fbar", new URI("http", "localhost", "/foo?bar", null, null).toURL().getPath());
+  }
   
   public void testMakeQualified() throws URISyntaxException {
     URI defaultUri = new URI("hdfs://host1/dir1");

Propchange: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java
------------------------------------------------------------------------------
--- svn:mergeinfo (original)
+++ svn:mergeinfo Tue Aug 16 00:37:15 2011
@@ -1,2 +1,3 @@
+/hadoop/common/trunk/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java:1153185-1158071
 /hadoop/core/branches/branch-0.19/mapred/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:713112
 /hadoop/core/trunk/src/test/mapred/org/apache/hadoop/io/TestSequenceFile.java:776175-785643

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/io/compress/TestCodec.java Tue Aug 16 00:37:15 2011
@@ -501,7 +501,7 @@ public class TestCodec {
     LOG.info("SUCCESS! Completed SequenceFileCodecTest with codec \"" + codecClass + "\"");
   }
   
-  public static void main(String[] args) {
+  public static void main(String[] args) throws IOException {
     int count = 10000;
     String codecClass = "org.apache.hadoop.io.compress.DefaultCodec";
 
@@ -511,25 +511,20 @@ public class TestCodec {
       System.exit(-1);
     }
 
-    try {
-      for (int i=0; i < args.length; ++i) {       // parse command line
-        if (args[i] == null) {
-          continue;
-        } else if (args[i].equals("-count")) {
-          count = Integer.parseInt(args[++i]);
-        } else if (args[i].equals("-codec")) {
-          codecClass = args[++i];
-        }
+    for (int i=0; i < args.length; ++i) {       // parse command line
+      if (args[i] == null) {
+        continue;
+      } else if (args[i].equals("-count")) {
+        count = Integer.parseInt(args[++i]);
+      } else if (args[i].equals("-codec")) {
+        codecClass = args[++i];
       }
-
-      Configuration conf = new Configuration();
-      int seed = 0;
-      codecTest(conf, seed, count, codecClass);
-    } catch (Exception e) {
-      System.err.println("Caught: " + e);
-      e.printStackTrace();
     }
 
+    Configuration conf = new Configuration();
+    int seed = 0;
+    // Note that exceptions will propagate out.
+    codecTest(conf, seed, count, codecClass);
   }
 
   @Test

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/StaticMapping.java Tue Aug 16 00:37:15 2011
@@ -18,7 +18,6 @@
 package org.apache.hadoop.net;
 
 import java.util.*;
-import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
@@ -60,19 +59,4 @@ public class StaticMapping extends Confi
       return m;
     }
   }
-  public List<String> resolveValidHosts(List<String> names)
-    throws UnknownHostException {
-    List<String> m = new ArrayList<String>();
-    synchronized (nameToRackMap) {
-      for (String name : names) {
-        String rackId;
-        if ((rackId = nameToRackMap.get(name)) != null) {
-          m.add(rackId);
-        } else {
-          throw new UnknownHostException(name);
-        }
-      }
-      return m;
-    }
-  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestNetUtils.java Tue Aug 16 00:37:15 2011
@@ -72,4 +72,20 @@ public class TestNetUtils {
     assertNull(NetUtils.getLocalInetAddress("invalid-address-for-test"));
     assertNull(NetUtils.getLocalInetAddress(null));
   }
+
+  @Test(expected=UnknownHostException.class)
+  public void testVerifyHostnamesException() throws UnknownHostException {
+    String[] names = {"valid.host.com", "1.com", "invalid host here"};
+    NetUtils.verifyHostnames(names);
+  }  
+
+  @Test
+  public void testVerifyHostnamesNoException() {
+    String[] names = {"valid.host.com", "1.com"};
+    try {
+      NetUtils.verifyHostnames(names);
+    } catch (UnknownHostException e) {
+      fail("NetUtils.verifyHostnames threw unexpected UnknownHostException");
+    }
+  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/net/TestScriptBasedMapping.java Tue Aug 16 00:37:15 2011
@@ -19,7 +19,6 @@ package org.apache.hadoop.net;
 
 import java.util.ArrayList;
 import java.util.List;
-import java.net.UnknownHostException;
 
 import org.apache.hadoop.conf.Configuration;
 
@@ -49,37 +48,5 @@ public class TestScriptBasedMapping exte
     List<String> result = mapping.resolve(names);
     assertNull(result);
   }
-  
-  public void testResolveValidInvalidHostException() {
-    names = new ArrayList<String>();
-    names.add("1.com"); // Add invalid hostname that doesn't resolve
-    boolean exceptionThrown = false;
-    try {
-      mapping.resolveValidHosts(names);
-    } catch (UnknownHostException e) {
-      exceptionThrown = true;
-    }
-    assertTrue(
-        "resolveValidHosts did not throw UnknownHostException for invalid host",
-        exceptionThrown);
-  }
-
-  public void testResolveValidHostNoException() {
-    conf.setInt(ScriptBasedMapping.SCRIPT_ARG_COUNT_KEY,
-        ScriptBasedMapping.MIN_ALLOWABLE_ARGS);
-    conf.set(ScriptBasedMapping.SCRIPT_FILENAME_KEY, "echo");
-    mapping.setConf(conf);    
 
-    names = new ArrayList<String>();
-    names.add("some.machine.name");
-    names.add("other.machine.name");
-    
-    boolean exceptionThrown = false;
-    try {
-      mapping.resolveValidHosts(names);
-    } catch (UnknownHostException e) {
-      exceptionThrown = true;
-    }
-    assertFalse("resolveValidHosts threw Exception for valid host", exceptionThrown);
-  }
 }

Modified: hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-common/src/test/java/org/apache/hadoop/util/TestDataChecksum.java Tue Aug 16 00:37:15 2011
@@ -111,6 +111,7 @@ public class TestDataChecksum {
     } catch (ChecksumException ce) {
       int expectedPos = checksum.getBytesPerChecksum() * (numSums - 1);
       assertEquals(expectedPos, ce.getPos());
+      assertTrue(ce.getMessage().contains("fake file"));
     }
   }
 

Propchange: hadoop/common/branches/HDFS-1623/hadoop-project/
------------------------------------------------------------------------------
--- svn:ignore (added)
+++ svn:ignore Tue Aug 16 00:37:15 2011
@@ -0,0 +1,5 @@
+.classpath
+.git
+.project
+.settings
+target

Modified: hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-project/pom.xml Tue Aug 16 00:37:15 2011
@@ -441,6 +441,11 @@
           <artifactId>maven-source-plugin</artifactId>
           <version>2.1.2</version>
         </plugin>
+        <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-deploy-plugin</artifactId>
+          <version>2.5</version>
+        </plugin>
       </plugins>
     </pluginManagement>
 
@@ -457,9 +462,6 @@
             <requireJavaVersion>
               <version>1.6</version>
             </requireJavaVersion>
-            <requireOS>
-              <family>unix</family>
-            </requireOS>
           </rules>
         </configuration>
         <executions>
@@ -502,7 +504,7 @@
       <id>os.linux</id>
       <activation>
         <os>
-          <family>Linux</family>
+          <family>!Mac</family>
         </os>
       </activation>
       <properties>

Modified: hadoop/common/branches/HDFS-1623/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/pom.xml?rev=1158072&r1=1158071&r2=1158072&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/pom.xml Tue Aug 16 00:37:15 2011
@@ -54,6 +54,11 @@
           <version>2.2-beta-3</version>
         </plugin>
         <plugin>
+          <groupId>org.apache.maven.plugins</groupId>
+          <artifactId>maven-deploy-plugin</artifactId>
+          <version>2.5</version>
+        </plugin>
+        <plugin>
           <groupId>org.apache.rat</groupId>
           <artifactId>apache-rat-plugin</artifactId>
           <version>0.7</version>
@@ -74,9 +79,6 @@
             <requireJavaVersion>
               <version>1.6</version>
             </requireJavaVersion>
-            <requireOS>
-              <family>unix</family>
-            </requireOS>
           </rules>
         </configuration>
         <executions>
@@ -104,6 +106,12 @@
         </executions>
       </plugin>
       <plugin>
+        <artifactId>maven-deploy-plugin</artifactId>
+        <configuration>
+          <skip>true</skip>
+        </configuration>
+      </plugin>
+      <plugin>
         <groupId>org.apache.rat</groupId>
         <artifactId>apache-rat-plugin</artifactId>
         <configuration>