You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cn...@apache.org on 2013/06/21 08:37:39 UTC
svn commit: r1495297 [29/46] - in /hadoop/common/branches/branch-1-win: ./
bin/ conf/ ivy/ lib/jdiff/ src/c++/libhdfs/docs/
src/c++/libhdfs/tests/conf/ src/contrib/capacity-scheduler/ivy/
src/contrib/capacity-scheduler/src/java/org/apache/hadoop/mapred...
Modified: hadoop/common/branches/branch-1-win/src/test/bin/test-patch.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/bin/test-patch.sh?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/bin/test-patch.sh (original)
+++ hadoop/common/branches/branch-1-win/src/test/bin/test-patch.sh Fri Jun 21 06:37:27 2013
@@ -10,647 +10,522 @@
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
+ ulimit -n 1024
+ ### Setup some variables.
+ ### Read variables from properties file
+ BASEDIR=$(dirname $0)
+ . ${BASEDIR}/../test-patch.properties
+ BASEDIR=$(cd ${BASEDIR};pwd)/../../../
+ DATE_CMD='date +%m-%d-%Y:%T'
+ CURL_OPTS="-s --connect-timeout 10 --retry 2 --retry-delay 2"
+ JIRA_URL=${JIRA_URL:-"https://issues.apache.org/jira"}
+ PROJECT="HADOOP"
+
+ usage() {
+ echo "
+ usage : $0 <options>
+ Required Options:
+ --patch-dir patch dir
+ --findbugs-home path to findbugs home
+ --forrest-home path to forrest home
+ --eclipse-home path to eclipse home
+ Optional Options:
+ --jenkins set jenkins mode
+ --patch-file pass the patch file in developer mode
+ Required Options when set to jenkins mode:
+ --jira-cli path to jira cli tools
+ --jira-password jira access password
+ --issue-num issue number to test
+ --attach-id patch attachment id to test
+ "
+ exit 1
+ }
-#set -x
-ulimit -n 1024
+ echo_banner() {
+ echo ""
+ echo ""
+ echo "======================================================================"
+ echo "======================================================================"
+ echo " ${*} "
+ echo "======================================================================"
+ echo "======================================================================"
+ echo ""
+ echo ""
+ }
-### Setup some variables.
-### JOB_NAME, SVN_REVISION, and BUILD_NUMBER are set by Hudson if it is run by patch process
-### Read variables from properties file
-. `dirname $0`/../test-patch.properties
-
-###############################################################################
-parseArgs() {
- case "$1" in
- HUDSON)
- ### Set HUDSON to true to indicate that this script is being run by Hudson
- HUDSON=true
- if [[ $# != 17 ]] ; then
- echo "ERROR: usage $0 HUDSON <PATCH_DIR> <SUPPORT_DIR> <PS_CMD> <WGET_CMD> <JIRACLI> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <ECLIPSE_HOME> <PYTHON_HOME> <WORKSPACE_BASEDIR> <TRIGGER_BUILD> <JIRA_PASSWD>"
- cleanupAndExit 0
- fi
- PATCH_DIR=$2
- SUPPORT_DIR=$3
- PS=$4
- WGET=$5
- JIRACLI=$6
- SVN=$7
- GREP=$8
- PATCH=$9
- FINDBUGS_HOME=${10}
- FORREST_HOME=${11}
- ECLIPSE_HOME=${12}
- PYTHON_HOME=${13}
- BASEDIR=${14}
- TRIGGER_BUILD_URL=${15}
- JIRA_PASSWD=${16}
- ### Retrieve the defect number
- if [ ! -e $PATCH_DIR/defectNum ] ; then
- echo "Could not determine the patch to test. Exiting."
- cleanupAndExit 0
- fi
- defect=`cat $PATCH_DIR/defectNum`
- if [ -z "$defect" ] ; then
- echo "Could not determine the patch to test. Exiting."
- cleanupAndExit 0
- fi
- ECLIPSE_PROPERTY="-Declipse.home=$ECLIPSE_HOME"
- PYTHON_PROPERTY="-Dpython.home=$PYTHON_HOME"
+ parseArgs() {
+
+ OPTS=$(getopt \
+ -n $0 \
+ -o '' \
+ -l 'patch-dir:' \
+ -l 'jira-cli:' \
+ -l 'findbugs-home:' \
+ -l 'forrest-home:' \
+ -l 'eclipse-home:' \
+ -l 'jira-password:' \
+ -l 'issue-num:' \
+ -l 'attach-id:' \
+ -l 'patch-file:' \
+ -l 'jenkins:' \
+ -- "$@")
+
+ if [ $? != 0 ]; then
+ usage
+ fi
+
+ eval set -- "$OPTS"
+ while true ; do
+ case "$1" in
+ --patch-dir)
+ PATCH_DIR=${2} ; shift 2
;;
- DEVELOPER)
- ### Set HUDSON to false to indicate that this script is being run by a developer
- HUDSON=false
- if [[ $# != 10 ]] ; then
- echo "ERROR: usage $0 DEVELOPER <PATCH_FILE> <SCRATCH_DIR> <SVN_CMD> <GREP_CMD> <PATCH_CMD> <FINDBUGS_HOME> <FORREST_HOME> <WORKSPACE_BASEDIR>"
- cleanupAndExit 0
+ --jira-cli)
+ JIRA_CLI=${2} ; shift 2
+ ;;
+ --findbugs-home)
+ FINDBUGS_HOME=${2} ; shift 2
+ ;;
+ --forrest-home)
+ FORREST_HOME=${2} ; shift 2
+ ;;
+ --eclipse-home)
+ ECLIPSE_HOME=${2} ; shift 2
+ ;;
+ --jira-password)
+ JIRA_PASSWORD=${2} ; shift 2
+ ;;
+ --issue-num)
+ ISSUE_NUM=${2} ; shift 2
+ ;;
+ --attach-id)
+ ATTACH_ID=${2} ; shift 2
+ ;;
+ --patch-file)
+ PATCH_FILE=${2} ; shift 2
+ ;;
+ --jenkins)
+ JENKINS=${2} ; shift 2
+ ;;
+ --)
+ shift ; break
+ ;;
+ *)
+ echo "Unknown option: $1"
+ usage
+ exit 1
+ ;;
+ esac
+ done
+
+ [[ -n $JENKINS ]] && echo "Jenkins mode" && \
+ [[ -z "$PATCH_DIR" || -z "$JIRA_CLI" || -z "$FINDBUGS_HOME" \
+ || -z "$FORREST_HOME" || -z "$ECLIPSE_HOME" || -z "$JIRA_PASSWORD" \
+ || -z "$ISSUE_NUM" || -z "$ATTACH_ID" ]] \
+ && echo "Required Args Missing" && usage
+ [[ -z $JENKINS ]] && echo "Developer mode" && \
+ [[ -z "$PATCH_FILE" || -z "$PATCH_DIR" || -z "$FINDBUGS_HOME" \
+ || -z "$FORREST_HOME" || -z "$ECLIPSE_HOME" ]] \
+ && echo "Required Args Missing" && usage
+ [[ ! -d "$PATCH_DIR" ]] && mkdir -p "$PATCH_DIR"
+
+ SVN=$(which svn 2> /dev/null)
+ GIT=$(which git 2> /dev/null)
+ [[ -d './.git' && -n $GIT ]] && GITPROJECT="true" && REVISION=$($GIT rev-parse --short HEAD)
+ [[ -d './.svn' && -n $SVN ]] && SVNPROJECT="true" && REVISION=$(svnversion)
+ [[ -z $GITPROJECT && -z $SVNPROJECT ]] && echo "neither git nor an svn project" && exit 1
+
+ CURL=$(which curl 2> /dev/null)
+ GREP=$(which grep 2> /dev/null)
+ PATCH=$(which patch 2> /dev/null)
+ [[ -z $CURL ]] && echo "curl not in path" && exit 1
+ [[ -z $PATCH ]] && echo "patch not in path" && exit 1
+ [[ -z $GREP ]] && echo "grep not in path" && exit 1
+ [[ -z $JENKINS ]] && JENKINS="false"
+ PATCH_URL=${JIRA_URL}/secure/attachment/${ATTACH_ID}/
+ VERSION=${PROJECT}-${REVISION}
+ ANT_CMD="$ANT_HOME/bin/ant -Dversion=${VERSION}"
+ }
+
+ ###############################################################################
+ ### Cleanup files
+ cleanupAndExit () {
+ local result=$1
+ if [[ $JENKINS == "true" ]] ; then
+ if [ -e "$PATCH_DIR" ] ; then
+ mv $PATCH_DIR $BASEDIR
fi
- ### PATCH_FILE contains the location of the patchfile
- PATCH_FILE=$2
- if [[ ! -e "$PATCH_FILE" ]] ; then
- echo "Unable to locate the patch file $PATCH_FILE"
+ fi
+ echo_banner " Finished build."
+ exit $result
+ }
+
+ ###############################################################################
+ checkout () {
+ echo_banner "Testing patch for ${PROJECT}-${ISSUE_NUM}."
+ ### When run by a developer, if the workspace contains modifications, do not continue
+ if [[ -n $GITPROJECT ]] ; then
+ status=$($GIT status)
+ [[ $JENKINS == "false" ]] && [[ -n "$status" ]] && [[ $(echo $status | $GREP -c '(working directory clean)') -eq 0 ]] && \
+ echo "Local modification found $status" && cleanupAndExit 1
+ (cd $BASEDIR ; $GIT clean -fdx ; $GIT reset --hard )
+ fi
+ if [[ -n $SVNPROJECT ]] ; then
+ status=$($SVN stat)
+ [[ $JENKINS == "false" ]] && [[ -n "$status" ]] && \
+ echo "Local modification found $status" && cleanupAndExit 1
+ (cd $BASEDIR ; $SVN revert -R ; rm -rf $($SVN status) ; $SVN up)
+ fi
+ return $?
+ }
+
+
+ ###############################################################################
+ setup () {
+ ### Download latest patch file (ignoring .htm and .html) when run from patch process
+ if [[ $JENKINS = "true" ]] ; then
+ $CURL $CURL_OPTS -o $PATCH_DIR/jira $JIRA_URL/browse/${PROJECT}-${ISSUE_NUM}
+ if [[ `$GREP -c 'Patch Available' $PATCH_DIR/jira` == 0 ]] ; then
+ echo "${PROJECT}-${ISSUE_NUM} is not \"Patch Available\". Exiting."
cleanupAndExit 0
fi
- PATCH_DIR=$3
- ### Check if $PATCH_DIR exists. If it does not exist, create a new directory
- if [[ ! -e "$PATCH_DIR" ]] ; then
- mkdir "$PATCH_DIR"
- if [[ $? == 0 ]] ; then
- echo "$PATCH_DIR has been created"
- else
- echo "Unable to create $PATCH_DIR"
- cleanupAndExit 0
- fi
- fi
- SVN=$4
- GREP=$5
- PATCH=$6
- FINDBUGS_HOME=$7
- FORREST_HOME=$8
- BASEDIR=$9
- ### Obtain the patch filename to append it to the version number
- defect=`basename $PATCH_FILE`
- ;;
- *)
- echo "ERROR: usage $0 HUDSON [args] | DEVELOPER [args]"
- cleanupAndExit 0
- ;;
- esac
-}
-
-###############################################################################
-checkout () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Testing patch for ${defect}."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- ### When run by a developer, if the workspace contains modifications, do not continue
- status=`$SVN stat`
- if [[ $HUDSON == "false" ]] ; then
- if [[ "$status" != "" ]] ; then
- echo "ERROR: can't run in a workspace that contains the following modifications"
- echo "$status"
- cleanupAndExit 1
- fi
- else
- cd $BASEDIR
- $SVN revert -R .
- rm -rf `$SVN status`
- $SVN update
- fi
- return $?
-}
-
-###############################################################################
-setup () {
- ### Download latest patch file (ignoring .htm and .html) when run from patch process
- if [[ $HUDSON == "true" ]] ; then
- $WGET -q -O $PATCH_DIR/jira http://issues.apache.org/jira/browse/$defect
- if [[ `$GREP -c 'Patch Available' $PATCH_DIR/jira` == 0 ]] ; then
- echo "$defect is not \"Patch Available\". Exiting."
- cleanupAndExit 0
- fi
- relativePatchURL=`$GREP -o '"/jira/secure/attachment/[0-9]*/[^"]*' $PATCH_DIR/jira | $GREP -v -e 'htm[l]*$' | sort | tail -1 | $GREP -o '/jira/secure/attachment/[0-9]*/[^"]*'`
- patchURL="http://issues.apache.org${relativePatchURL}"
- patchNum=`echo $patchURL | $GREP -o '[0-9]*/' | $GREP -o '[0-9]*'`
- echo "$defect patch is being downloaded at `date` from"
- echo "$patchURL"
- $WGET -q -O $PATCH_DIR/patch $patchURL
- VERSION=${SVN_REVISION}_${defect}_PATCH-${patchNum}
- JIRA_COMMENT="Here are the results of testing the latest attachment
- $patchURL
- against trunk revision ${SVN_REVISION}."
-
- ### Copy in any supporting files needed by this process
- cp -r $SUPPORT_DIR/lib/* ./lib
- #PENDING: cp -f $SUPPORT_DIR/etc/checkstyle* ./src/test
- ### Copy the patch file to $PATCH_DIR
- else
- VERSION=PATCH-${defect}
- cp $PATCH_FILE $PATCH_DIR/patch
- if [[ $? == 0 ]] ; then
- echo "Patch file $PATCH_FILE copied to $PATCH_DIR"
+ echo " [`$DATE_CMD`] Downloaded $PATCH_URL"
+ $CURL $CURL_OPTS -o $PATCH_DIR/patch $PATCH_URL
+ [[ $? -ne 0 ]] && echo "$PATCH_URL download failed" && cleanupAndExit 1
+ JIRA_COMMENT="Here are the results of testing
+ $PATCH_URL
+ against revision ${REVISION}"
else
- echo "Could not copy $PATCH_FILE to $PATCH_DIR"
- cleanupAndExit 0
+ cp $PATCH_FILE $PATCH_DIR/patch
+ [[ $? -ne 0 ]] && echo "Could not copy $PATCH_FILE to $PATCH_DIR" && cleanupAndExit 1
+ echo "Patch file $PATCH_FILE copied to $PATCH_DIR"
fi
- fi
- ### exit if warnings are NOT defined in the properties file
- if [ -z "$OK_FINDBUGS_WARNINGS" ] || [[ -z "$OK_JAVADOC_WARNINGS" ]] || [[ -z $OK_RELEASEAUDIT_WARNINGS ]]; then
- echo "Please define the following properties in test-patch.properties file"
- echo "OK_FINDBUGS_WARNINGS"
- echo "OK_RELEASEAUDIT_WARNINGS"
- echo "OK_JAVADOC_WARNINGS"
- cleanupAndExit 1
- fi
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Pre-build trunk to verify trunk stability and javac warnings"
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- ### DISABLE RELEASE AUDIT UNTIL HADOOP-4074 IS FIXED
- ### Do not call releaseaudit when run by a developer
- ### if [[ $HUDSON == "true" ]] ; then
- ### echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/trunkReleaseAuditWarnings.txt 2>&1"
- ### $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/trunkReleaseAuditWarnings.txt 2>&1
- ### fi
- echo "$ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1"
- $ANT_HOME/bin/ant -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/trunkJavacWarnings.txt 2>&1
- if [[ $? != 0 ]] ; then
- echo "Trunk compilation is broken?"
+ ### exit if warnings are NOT defined in the properties file
+ if [ -z "$OK_FINDBUGS_WARNINGS" ] || [[ -z "$OK_JAVADOC_WARNINGS" ]] || [[ -z $OK_RELEASEAUDIT_WARNINGS ]]; then
+ echo "Please define the following properties in test-patch.properties file"
+ echo "OK_FINDBUGS_WARNINGS"
+ echo "OK_RELEASEAUDIT_WARNINGS"
+ echo "OK_JAVADOC_WARNINGS"
cleanupAndExit 1
- fi
-}
+ fi
+ echo_banner " Pre-build codebase to verify stability and javac warnings"
+ ### Do not call releaseaudit when run by a developer
+ if [[ $JENKINS = "true" ]] ; then
+ echo "$ANT_CMD -Dforrest.home=${FORREST_HOME} -D${PROJECT}PatchProcess= releaseaudit > $PATCH_DIR/currentReleaseAuditWarnings.txt 2>&1"
+ $ANT_CMD -Dforrest.home=${FORREST_HOME} -D${PROJECT}PatchProcess= releaseaudit > $PATCH_DIR/currentReleaseAuditWarnings.txt 2>&1
+ fi
+ echo "$ANT_CMD -Djavac.args=-Xlint -Xmaxwarns 1000 -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/currentJavacWarnings.txt 2>&1"
+ $ANT_CMD -Djavac.args="-Xlint -Xmaxwarns 1000" -Dforrest.home=${FORREST_HOME} -D${PROJECT_NAME}PatchProcess= clean tar > $PATCH_DIR/currentJavacWarnings.txt 2>&1
+ [[ $? -ne 0 ]] && echo "compilation is broken?" && cleanupAndExit 1
+ }
+
+ ###############################################################################
+ ### Check for @author tags in the patch
+ checkAuthor () {
+ echo_banner " Checking there are no @author tags in the patch."
+ authorTags=`$GREP -c -i '@author' $PATCH_DIR/patch`
+ echo "There appear to be $authorTags @author tags in the patch."
+ if [[ $authorTags != 0 ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
-###############################################################################
-### Check for @author tags in the patch
-checkAuthor () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Checking there are no @author tags in the patch."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- authorTags=`$GREP -c -i '@author' $PATCH_DIR/patch`
- echo "There appear to be $authorTags @author tags in the patch."
- if [[ $authorTags != 0 ]] ; then
+ -1 @author. The patch appears to contain $authorTags @author tags which the Hadoop community has agreed to not allow in code contributions."
+ return 1
+ fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 @author. The patch appears to contain $authorTags @author tags which the Hadoop community has agreed to not allow in code contributions."
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 @author. The patch does not contain any @author tags."
- return 0
-}
-
-###############################################################################
-### Check for tests in the patch
-checkTests () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Checking there are new or changed tests in the patch."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- testReferences=`$GREP -c -i '/test' $PATCH_DIR/patch`
- echo "There appear to be $testReferences test files referenced in the patch."
- if [[ $testReferences == 0 ]] ; then
- if [[ $HUDSON == "true" ]] ; then
- patchIsDoc=`$GREP -c -i 'title="documentation' $PATCH_DIR/jira`
- if [[ $patchIsDoc != 0 ]] ; then
- echo "The patch appears to be a documentation patch that doesn't require tests."
- JIRA_COMMENT="$JIRA_COMMENT
-
- +0 tests included. The patch appears to be a documentation patch that doesn't require tests."
- return 0
+ +1 @author. The patch does not contain any @author tags."
+ return 0
+ }
+
+ ###############################################################################
+ ### Check for tests in the patch
+ checkTests () {
+ echo_banner " Checking there are new or changed tests in the patch."
+ testReferences=`$GREP -c -i '/test' $PATCH_DIR/patch`
+ echo "There appear to be $testReferences test files referenced in the patch."
+ if [[ $testReferences == 0 ]] ; then
+ if [[ $JENKINS == "true" ]] ; then
+ patchIsDoc=`$GREP -c -i 'title="documentation' $PATCH_DIR/jira`
+ if [[ $patchIsDoc != 0 ]] ; then
+ echo "The patch appears to be a documentation patch that doesn't require tests."
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ +0 tests included. The patch appears to be a documentation patch that doesn't require tests."
+ return 0
+ fi
fi
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ -1 tests included. The patch doesn't appear to include any new or modified tests.
+ Please justify why no tests are needed for this patch."
+ return 1
fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 tests included. The patch doesn't appear to include any new or modified tests.
- Please justify why no tests are needed for this patch."
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 tests included. The patch appears to include $testReferences new or modified tests."
- return 0
-}
+ +1 tests included. The patch appears to include $testReferences new or modified tests."
+ return 0
+ }
+
+ ###############################################################################
+ ### Attempt to apply the patch
+ applyPatch () {
+ echo_banner " Applying patch."
+ $PATCH -t -l -E -p0 < $PATCH_DIR/patch
+ [[ $? -ne 0 ]] && $PATCH -t -l -E -p1 < $PATCH_DIR/patch
+ if [ $? -ne 0 ] ; then
+ echo "PATCH APPLICATION FAILED"
+ JIRA_COMMENT="$JIRA_COMMENT
-###############################################################################
-### Attempt to apply the patch
-applyPatch () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Applying patch."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- $PATCH -E -p0 < $PATCH_DIR/patch
- if [[ $? != 0 ]] ; then
- echo "PATCH APPLICATION FAILED"
- JIRA_COMMENT="$JIRA_COMMENT
+ -1 patch. The patch command could not apply the patch."
+ return 1
+ fi
+ return 0
+ }
- -1 patch. The patch command could not apply the patch."
- return 1
- fi
- return 0
-}
-###############################################################################
-### Check there are no javadoc warnings
-checkJavadocWarnings () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Determining number of patched javadoc warnings."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
- javadocWarnings=`$GREP -o '\[javadoc\] [0-9]* warning' $PATCH_DIR/patchJavadocWarnings.txt | awk '{total += $2} END {print total}'`
- echo ""
- echo ""
- echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
+ ###############################################################################
+ ### Check there are no javadoc warnings
+ checkJavadocWarnings () {
+ echo_banner " Determining number of patched javadoc warnings."
+ echo "$ANT_CMD -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt"
+ $ANT_CMD -DHadoopPatchProcess= clean javadoc | tee $PATCH_DIR/patchJavadocWarnings.txt
+ javadocWarnings=`$GREP -o '\[javadoc\] [0-9]* warning' $PATCH_DIR/patchJavadocWarnings.txt | awk '{total += $2} END {print total}'`
+ echo ""
+ echo ""
+ echo "There appear to be $javadocWarnings javadoc warnings generated by the patched build."
- ### if current warnings greater than OK_JAVADOC_WARNINGS
- if [[ $javadocWarnings > $OK_JAVADOC_WARNINGS ]] ; then
- JIRA_COMMENT="$JIRA_COMMENT
+ ### if current warnings greater than OK_JAVADOC_WARNINGS
+ if [[ $javadocWarnings > $OK_JAVADOC_WARNINGS ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
- -1 javadoc. The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 javadoc. The javadoc tool did not generate any warning messages."
-return 0
-}
-
-###############################################################################
-### Check there are no changes in the number of Javac warnings
-checkJavacWarnings () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Determining number of patched javac warnings."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -Djavac.args="-Xlint -Xmaxwarns 1000" $ECLIPSE_PROPERTY -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
- if [[ $? != 0 ]] ; then
+ -1 javadoc. The javadoc tool appears to have generated `expr $(($javadocWarnings-$OK_JAVADOC_WARNINGS))` warning messages."
+ return 1
+ fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 javac. The patch appears to cause tar ant target to fail."
- return 1
- fi
+ +1 javadoc. The javadoc tool did not generate any warning messages."
+ return 0
+ }
- ### Compare trunk and patch javac warning numbers
- if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
- trunkJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/trunkJavacWarnings.txt | awk '{total += $2} END {print total}'`
- patchJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/patchJavacWarnings.txt | awk '{total += $2} END {print total}'`
- echo "There appear to be $trunkJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
- if [[ $patchJavacWarnings != "" && $trunkJavacWarnings != "" ]] ; then
- if [[ $patchJavacWarnings -gt $trunkJavacWarnings ]] ; then
- JIRA_COMMENT="$JIRA_COMMENT
+ ###############################################################################
+ ### Check there are no changes in the number of Javac warnings
+ checkJavacWarnings () {
+ echo_banner " Determining number of patched javac warnings."
+ echo "$ANT_CMD -Djavac.args=-Xlint -Xmaxwarns 1000 -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1"
+ $ANT_CMD -Djavac.args="-Xlint -Xmaxwarns 1000" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= tar > $PATCH_DIR/patchJavacWarnings.txt 2>&1
+ if [[ $? != 0 ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
- -1 javac. The applied patch generated $patchJavacWarnings javac compiler warnings (more than the trunk's current $trunkJavacWarnings warnings)."
- return 1
- fi
+ -1 javac. The patch appears to cause tar ant target to fail."
+ return 1
fi
- fi
- JIRA_COMMENT="$JIRA_COMMENT
- +1 javac. The applied patch does not increase the total number of javac compiler warnings."
- return 0
-}
+ ### Compare current codebase and patch javac warning numbers
+ if [[ -f $PATCH_DIR/patchJavacWarnings.txt ]] ; then
+ currentJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/currentJavacWarnings.txt | awk '{total += $2} END {print total}'`
+ patchJavacWarnings=`$GREP -o '\[javac\] [0-9]* warning' $PATCH_DIR/patchJavacWarnings.txt | awk '{total += $2} END {print total}'`
+ echo "There appear to be $currentJavacWarnings javac compiler warnings before the patch and $patchJavacWarnings javac compiler warnings after applying the patch."
+ if [[ $patchJavacWarnings != "" && $currentJavacWarnings != "" ]] ; then
+ if [[ $patchJavacWarnings -gt $currentJavacWarnings ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
-###############################################################################
-### Check there are no changes in the number of release audit (RAT) warnings
-checkReleaseAuditWarnings () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Determining number of patched release audit warnings."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
-
- ### Compare trunk and patch release audit warning numbers
- if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
- patchReleaseAuditWarnings=`$GREP -c '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt`
- echo ""
- echo ""
- echo "There appear to be $OK_RELEASEAUDIT_WARNINGS release audit warnings before the patch and $patchReleaseAuditWarnings release audit warnings after applying the patch."
- if [[ $patchReleaseAuditWarnings != "" && $OK_RELEASEAUDIT_WARNINGS != "" ]] ; then
- if [[ $patchReleaseAuditWarnings -gt $OK_RELEASEAUDIT_WARNINGS ]] ; then
- JIRA_COMMENT="$JIRA_COMMENT
+ -1 javac. The applied patch generated $patchJavacWarnings javac compiler warnings (more than the current $currentJavacWarnings warnings)."
- -1 release audit. The applied patch generated $patchReleaseAuditWarnings release audit warnings (more than the trunk's current $OK_RELEASEAUDIT_WARNINGS warnings)."
- $GREP '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt > $PATCH_DIR/patchReleaseAuditProblems.txt
- echo "Lines that start with ????? in the release audit report indicate files that do not have an Apache license header." >> $PATCH_DIR/patchReleaseAuditProblems.txt
- JIRA_COMMENT_FOOTER="Release audit warnings: $BUILD_URL/artifact/trunk/patchprocess/patchReleaseAuditProblems.txt
-$JIRA_COMMENT_FOOTER"
- return 1
+ return 1
+ fi
fi
fi
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 release audit. The applied patch does not increase the total number of release audit warnings."
- return 0
-}
-
-###############################################################################
-### Check there are no changes in the number of Checkstyle warnings
-checkStyle () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Determining number of patched checkstyle warnings."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- echo "THIS IS NOT IMPLEMENTED YET"
- echo ""
- echo ""
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= checkstyle
- JIRA_COMMENT_FOOTER="Checkstyle results: http://hudson.zones.apache.org/hudson/job/$JOB_NAME/$BUILD_NUMBER/artifact/trunk/build/test/checkstyle-errors.html
-$JIRA_COMMENT_FOOTER"
- ### TODO: calculate actual patchStyleErrors
-# patchStyleErrors=0
-# if [[ $patchStyleErrors != 0 ]] ; then
-# JIRA_COMMENT="$JIRA_COMMENT
-#
-# -1 checkstyle. The patch generated $patchStyleErrors code style errors."
-# return 1
-# fi
-# JIRA_COMMENT="$JIRA_COMMENT
-#
-# +1 checkstyle. The patch generated 0 code style errors."
- return 0
-}
-
-###############################################################################
-### Check there are no changes in the number of Findbugs warnings
-checkFindbugsWarnings () {
- findbugs_version=`${FINDBUGS_HOME}/bin/findbugs -version`
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Determining number of patched Findbugs warnings."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
- if [ $? != 0 ] ; then
JIRA_COMMENT="$JIRA_COMMENT
- -1 findbugs. The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
- return 1
- fi
-JIRA_COMMENT_FOOTER="Findbugs warnings: http://hudson.zones.apache.org/hudson/job/$JOB_NAME/$BUILD_NUMBER/artifact/trunk/build/test/findbugs/newPatchFindbugsWarnings.html
-$JIRA_COMMENT_FOOTER"
- cp $BASEDIR/build/test/findbugs/*.xml $PATCH_DIR/patchFindbugsWarnings.xml
- $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
- $PATCH_DIR/patchFindbugsWarnings.xml \
- $PATCH_DIR/patchFindbugsWarnings.xml
- findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
- $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml | /usr/bin/awk '{print $1}'`
- $FINDBUGS_HOME/bin/convertXmlToText -html \
- $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml \
- $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html
- cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html $PATCH_DIR/newPatchFindbugsWarnings.html
- cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml $PATCH_DIR/newPatchFindbugsWarnings.xml
-
- ### if current warnings greater than OK_FINDBUGS_WARNINGS
- if [[ $findbugsWarnings > $OK_FINDBUGS_WARNINGS ]] ; then
+ +1 javac. The applied patch does not increase the total number of javac compiler warnings."
+ return 0
+ }
+
+ ###############################################################################
+ ### Check there are no changes in the number of release audit (RAT) warnings
+ checkReleaseAuditWarnings () {
+ echo_banner " Determining number of patched release audit warnings."
+ echo "$ANT_CMD -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1"
+ ${ANT_CMD} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= releaseaudit > $PATCH_DIR/patchReleaseAuditWarnings.txt 2>&1
+
+ ### Compare current and patch release audit warning numbers
+ if [[ -f $PATCH_DIR/patchReleaseAuditWarnings.txt ]] ; then
+ patchReleaseAuditWarnings=`$GREP -c '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt`
+ echo ""
+ echo ""
+ echo "There appear to be $OK_RELEASEAUDIT_WARNINGS release audit warnings before the patch and $patchReleaseAuditWarnings release audit warnings after applying the patch."
+ if [[ $patchReleaseAuditWarnings != "" && $OK_RELEASEAUDIT_WARNINGS != "" ]] ; then
+ if [[ $patchReleaseAuditWarnings -gt $OK_RELEASEAUDIT_WARNINGS ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ -1 release audit. The applied patch generated $patchReleaseAuditWarnings release audit warnings (more than the current $OK_RELEASEAUDIT_WARNINGS warnings)."
+ $GREP '\!?????' $PATCH_DIR/patchReleaseAuditWarnings.txt > $PATCH_DIR/patchReleaseAuditProblems.txt
+ echo "Lines that start with ????? in the release audit report indicate files that do not have an Apache license header." >> $PATCH_DIR/patchReleaseAuditProblems.txt
+ JIRA_COMMENT_FOOTER="Release audit warnings: $BUILD_URL/artifact/trunk/patchprocess/patchReleaseAuditProblems.txt
+ $JIRA_COMMENT_FOOTER"
+ return 1
+ fi
+ fi
+ fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 findbugs. The patch appears to introduce `expr $(($findbugsWarnings-$OK_FINDBUGS_WARNINGS))` new Findbugs (version ${findbugs_version}) warnings."
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 findbugs. The patch does not introduce any new Findbugs (version ${findbugs_version}) warnings."
- return 0
-}
+ +1 release audit. The applied patch does not increase the total number of release audit warnings."
+ return 0
+ }
+
+ ###############################################################################
+ ### Check there are no changes in the number of Findbugs warnings
+ checkFindbugsWarnings () {
+ findbugs_version=`${FINDBUGS_HOME}/bin/findbugs -version`
+ echo_banner " Determining number of patched Findbugs warnings."
+ echo "$ANT_CMD -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs"
+ $ANT_CMD -Dfindbugs.home=${FINDBUGS_HOME} -Dforrest.home=${FORREST_HOME} -DHadoopPatchProcess= findbugs
+ if [ $? != 0 ] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ -1 findbugs. The patch appears to cause Findbugs (version ${findbugs_version}) to fail."
+ return 1
+ fi
+ JIRA_COMMENT_FOOTER="Findbugs warnings: $BUILD_URL/artifact/trunk/build/test/findbugs/newPatchFindbugsWarnings.html
+ $JIRA_COMMENT_FOOTER"
+ cp $BASEDIR/build/test/findbugs/*.xml $PATCH_DIR/patchFindbugsWarnings.xml
+ $FINDBUGS_HOME/bin/setBugDatabaseInfo -timestamp "01/01/2000" \
+ $PATCH_DIR/patchFindbugsWarnings.xml \
+ $PATCH_DIR/patchFindbugsWarnings.xml
+ findbugsWarnings=`$FINDBUGS_HOME/bin/filterBugs -first "01/01/2000" $PATCH_DIR/patchFindbugsWarnings.xml \
+ $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml | awk '{print $1}'`
+ $FINDBUGS_HOME/bin/convertXmlToText -html \
+ $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml \
+ $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html
+ cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.html $PATCH_DIR/newPatchFindbugsWarnings.html
+ cp $BASEDIR/build/test/findbugs/newPatchFindbugsWarnings.xml $PATCH_DIR/newPatchFindbugsWarnings.xml
+
+ ### if current warnings greater than OK_FINDBUGS_WARNINGS
+ if [[ $findbugsWarnings -gt $OK_FINDBUGS_WARNINGS ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ -1 findbugs. The patch appears to introduce `expr $(($findbugsWarnings-$OK_FINDBUGS_WARNINGS))` new Findbugs (version ${findbugs_version}) warnings."
+ return 1
+ fi
+ JIRA_COMMENT="$JIRA_COMMENT
-###############################################################################
-### Run the test-core target
-runCoreTests () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Running core tests."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
-
- ### Kill any rogue build processes from the last attempt
- $PS auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
+ +1 findbugs. The patch does not introduce any new Findbugs (version ${findbugs_version}) warnings."
+ return 0
+ }
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME create-c++-configure docs tar test-core"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME create-c++-configure docs tar test-core
- if [[ $? != 0 ]] ; then
- failed_tests=`grep -l "<failure" build/test/*.xml | sed -e "s|build/test/TEST-| |g" | sed -e "s|\.xml||g"`
+ ###############################################################################
+ ### Run the test-core target
+ runCoreTests () {
+ echo_banner " Running core tests."
+
+ ### Kill any rogue build processes from the last attempt
+ ps auxwww | $GREP HadoopPatchProcess | awk '{print $2}' | xargs -t -I {} kill -9 {} > /dev/null
+
+ echo "$ANT_CMD -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME create-c++-configure test-core"
+ $ANT_CMD -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes -Dcompile.c++=yes -Dforrest.home=$FORREST_HOME create-c++-configure test-core
+ if [[ $? != 0 ]] ; then
+ failed_tests=`grep -l "<failure" build/test/*.xml | sed -e "s|build/test/TEST-| |g" | sed -e "s|\.xml||g"`
+ JIRA_COMMENT="$JIRA_COMMENT
+
+ -1 core tests. The patch failed these core unit tests:
+ $failed_tests"
+ return 1
+ fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 core tests. The patch failed these core unit tests:
- $failed_tests"
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
+ +1 core tests. The patch passed core unit tests."
+ return 0
+ }
+
+ ###############################################################################
+ ### Run the test-contrib target
+ runContribTests () {
+ echo_banner " Running contrib tests."
+ ### Kill any rogue build processes from the last attempt
+ ps -auxwww | $GREP HadoopPatchProcess | awk '{print $2}' | xargs -t -I {} kill -9 {} > /dev/null
+
+ echo "$ANT_CMD -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes test-contrib"
+ $ANT_CMD -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes test-contrib
+ if [[ $? != 0 ]] ; then
+ JIRA_COMMENT="$JIRA_COMMENT
- +1 core tests. The patch passed core unit tests."
- return 0
-}
-
-###############################################################################
-### Run the test-contrib target
-runContribTests () {
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Running contrib tests."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
-
- ### Kill any rogue build processes from the last attempt
- $PS -auxwww | $GREP HadoopPatchProcess | /usr/bin/nawk '{print $2}' | /usr/bin/xargs -t -I {} /bin/kill -9 {} > /dev/null
-
- echo "$ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY $PYTHON_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes test-contrib"
- $ANT_HOME/bin/ant -Dversion="${VERSION}" $ECLIPSE_PROPERTY $PYTHON_PROPERTY -DHadoopPatchProcess= -Dtest.junit.output.format=xml -Dtest.output=yes test-contrib
- if [[ $? != 0 ]] ; then
+ -1 contrib tests. The patch failed contrib unit tests."
+ return 1
+ fi
JIRA_COMMENT="$JIRA_COMMENT
- -1 contrib tests. The patch failed contrib unit tests."
- return 1
- fi
- JIRA_COMMENT="$JIRA_COMMENT
-
- +1 contrib tests. The patch passed contrib unit tests."
- return 0
-}
+ +1 contrib tests. The patch passed contrib unit tests."
+ return 0
+ }
+
+ ###############################################################################
+ ### Submit a comment to the defect's Jira
+ submitJiraComment () {
+ local result=$1
+ ### Do not output the value of JIRA_COMMENT_FOOTER when run by a developer
+ if [[ $JENKINS == "false" ]] ; then
+ JIRA_COMMENT_FOOTER=""
+ fi
+ if [[ $result == 0 ]] ; then
+ comment="+1 overall. $JIRA_COMMENT
-###############################################################################
-### Submit a comment to the defect's Jira
-submitJiraComment () {
- local result=$1
- ### Do not output the value of JIRA_COMMENT_FOOTER when run by a developer
- if [[ $HUDSON == "false" ]] ; then
- JIRA_COMMENT_FOOTER=""
- fi
- if [[ $result == 0 ]] ; then
- comment="+1 overall. $JIRA_COMMENT
+ $JIRA_COMMENT_FOOTER"
+ else
+ comment="-1 overall. $JIRA_COMMENT
-$JIRA_COMMENT_FOOTER"
- else
- comment="-1 overall. $JIRA_COMMENT
+ $JIRA_COMMENT_FOOTER"
+ fi
+ ### Output the test result to the console
+ echo "
-$JIRA_COMMENT_FOOTER"
- fi
- ### Output the test result to the console
- echo "
+ $comment"
-$comment"
+ if [[ $JENKINS == "true" ]] ; then
+ echo_banner " Adding comment to Jira."
- if [[ $HUDSON == "true" ]] ; then
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Adding comment to Jira."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
+ ### Update Jira with a comment
+ export USER=jenkins
+ $JIRA_CLI -s ${JIRA_URL} login hadoopqa $JIRA_PASSWD
+ $JIRA_CLI -s ${JIRA_URL} comment ${PROJECT}-${ISSUE_NUM} "$comment"
+ $JIRA_CLI -s ${JIRA_URL} logout
+ fi
+ }
- ### Update Jira with a comment
- export USER=hudson
- $JIRACLI -s issues.apache.org/jira login hadoopqa $JIRA_PASSWD
- $JIRACLI -s issues.apache.org/jira comment $defect "$comment"
- $JIRACLI -s issues.apache.org/jira logout
- fi
-}
-###############################################################################
-### Cleanup files
-cleanupAndExit () {
- local result=$1
- if [[ $HUDSON == "true" ]] ; then
- if [ -e "$PATCH_DIR" ] ; then
- mv $PATCH_DIR $BASEDIR
- fi
+ parseArgs $@
+ checkout
+ [[ $? -ne 0 ]] && cleanupAndExit 1
+ setup
+ checkAuthor
+ RESULT=$?
+ checkTests
+ (( RESULT = RESULT + $? ))
+ applyPatch
+ if [[ $? != 0 ]] ; then
+ submitJiraComment 1
+ cleanupAndExit 1
fi
- echo ""
- echo ""
- echo "======================================================================"
- echo "======================================================================"
- echo " Finished build."
- echo "======================================================================"
- echo "======================================================================"
- echo ""
- echo ""
- exit $result
-}
-
-###############################################################################
-###############################################################################
-###############################################################################
-
-JIRA_COMMENT=""
-JIRA_COMMENT_FOOTER="Console output: http://hudson.zones.apache.org/hudson/job/$JOB_NAME/$BUILD_NUMBER/console
-
-This message is automatically generated."
-
-### Check if arguments to the script have been specified properly or not
-parseArgs $@
-cd $BASEDIR
-
-checkout
-RESULT=$?
-if [[ $HUDSON == "true" ]] ; then
- if [[ $RESULT != 0 ]] ; then
- ### Resubmit build.
- $WGET -q -O $PATCH_DIR/build $TRIGGER_BUILD_URL
- exit 100
+ checkJavadocWarnings
+ (( RESULT = $RESULT + $? ))
+ checkJavacWarnings
+ (( RESULT = $RESULT + $? ))
+ checkStyle
+ (( RESULT = $RESULT + $? ))
+ checkFindbugsWarnings
+ (( RESULT = $RESULT + $? ))
+ ### Do not call these when run by a developer
+ if [[ $JENKINS == "true" ]] ; then
+ checkReleaseAuditWarnings
+ (( RESULT = $RESULT + $? ))
+ runCoreTests
+ (( RESULT = $RESULT + $? ))
+ runContribTests
+ (( RESULT = $RESULT + $? ))
fi
-fi
-setup
-checkAuthor
-RESULT=$?
-
-checkTests
-(( RESULT = RESULT + $? ))
-applyPatch
-if [[ $? != 0 ]] ; then
- submitJiraComment 1
- cleanupAndExit 1
-fi
-checkJavadocWarnings
-(( RESULT = RESULT + $? ))
-checkJavacWarnings
-(( RESULT = RESULT + $? ))
-checkStyle
-(( RESULT = RESULT + $? ))
-checkFindbugsWarnings
-(( RESULT = RESULT + $? ))
-### Do not call these when run by a developer
-if [[ $HUDSON == "true" ]] ; then
- ### DISABLE RELEASE AUDIT UNTIL HADOOP-4074 IS FIXED
- ### checkReleaseAuditWarnings
- ### (( RESULT = RESULT + $? ))
- runCoreTests
- (( RESULT = RESULT + $? ))
- runContribTests
- (( RESULT = RESULT + $? ))
-fi
-JIRA_COMMENT_FOOTER="Test results: http://hudson.zones.apache.org/hudson/job/$JOB_NAME/$BUILD_NUMBER/testReport/
-$JIRA_COMMENT_FOOTER"
-
-submitJiraComment $RESULT
-cleanupAndExit $RESULT
-
+ JIRA_COMMENT_FOOTER="Test results: $BUILD_URL/testReport/
+ $JIRA_COMMENT_FOOTER"
+ submitJiraComment $RESULT
+ cleanupAndExit $RESULT
Modified: hadoop/common/branches/branch-1-win/src/test/checkstyle-noframes-sorted.xsl
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/checkstyle-noframes-sorted.xsl?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/checkstyle-noframes-sorted.xsl (original)
+++ hadoop/common/branches/branch-1-win/src/test/checkstyle-noframes-sorted.xsl Fri Jun 21 06:37:27 2013
@@ -1,4 +1,17 @@
<xsl:stylesheet xmlns:xsl="http://www.w3.org/1999/XSL/Transform" version="1.0">
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
<xsl:output method="html" indent="yes"/>
<xsl:decimal-format decimal-separator="." grouping-separator="," />
Modified: hadoop/common/branches/branch-1-win/src/test/commit-tests
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/commit-tests?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/commit-tests (original)
+++ hadoop/common/branches/branch-1-win/src/test/commit-tests Fri Jun 21 06:37:27 2013
@@ -118,6 +118,7 @@
**/TestGetBlocks.java
**/TestHDFSServerPorts.java
**/TestHDFSTrash.java
+**/TestHDFSConcat.java
**/TestHeartbeatHandling.java
**/TestHost2NodesMap.java
**/TestInterDatanodeProtocol.java
Modified: hadoop/common/branches/branch-1-win/src/test/ddl/buffer.jr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/ddl/buffer.jr?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/ddl/buffer.jr (original)
+++ hadoop/common/branches/branch-1-win/src/test/ddl/buffer.jr Fri Jun 21 06:37:27 2013
@@ -1,3 +1,18 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
module org.apache.hadoop.record {
class RecBuffer {
buffer data;
Modified: hadoop/common/branches/branch-1-win/src/test/ddl/int.jr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/ddl/int.jr?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/ddl/int.jr (original)
+++ hadoop/common/branches/branch-1-win/src/test/ddl/int.jr Fri Jun 21 06:37:27 2013
@@ -1,3 +1,18 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
module org.apache.hadoop.record {
class RecInt {
int data;
Modified: hadoop/common/branches/branch-1-win/src/test/ddl/string.jr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/ddl/string.jr?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/ddl/string.jr (original)
+++ hadoop/common/branches/branch-1-win/src/test/ddl/string.jr Fri Jun 21 06:37:27 2013
@@ -1,3 +1,18 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
module org.apache.hadoop.record {
class RecString {
ustring data;
Modified: hadoop/common/branches/branch-1-win/src/test/ddl/test.jr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/ddl/test.jr?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/ddl/test.jr (original)
+++ hadoop/common/branches/branch-1-win/src/test/ddl/test.jr Fri Jun 21 06:37:27 2013
@@ -1,3 +1,18 @@
+/**
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+
module org.apache.hadoop.record {
class RecRecord0 {
ustring stringVal;
Modified: hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml (original)
+++ hadoop/common/branches/branch-1-win/src/test/findbugsExcludeFile.xml Fri Jun 21 06:37:27 2013
@@ -137,4 +137,10 @@
<Method name="doAbort" />
<Bug pattern="DM_EXIT" />
</Match>
+ <!-- Don't complain about System.exit() being called from quit() -->
+ <Match>
+ <Class name="org.apache.hadoop.hdfs.server.namenode.MetaRecoveryContext" />
+ <Method name="quit" />
+ <Bug pattern="DM_EXIT" />
+ </Match>
</FindBugsFilter>
Modified: hadoop/common/branches/branch-1-win/src/test/krb5.conf
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/krb5.conf?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/krb5.conf (original)
+++ hadoop/common/branches/branch-1-win/src/test/krb5.conf Fri Jun 21 06:37:27 2013
@@ -1,3 +1,21 @@
+#
+# Licensed to the Apache Software Foundation (ASF) under one
+# or more contributor license agreements. See the NOTICE file
+# distributed with this work for additional information
+# regarding copyright ownership. The ASF licenses this file
+# to you under the Apache License, Version 2.0 (the
+# "License"); you may not use this file except in compliance
+# with the License. You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
[libdefaults]
default_realm = APACHE.ORG
udp_preference_limit = 1
Modified: hadoop/common/branches/branch-1-win/src/test/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/log4j.properties?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/log4j.properties (original)
+++ hadoop/common/branches/branch-1-win/src/test/log4j.properties Fri Jun 21 06:37:27 2013
@@ -1,3 +1,16 @@
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
# log4j configuration used during build and unit tests
log4j.rootLogger=info,stdout
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xml Fri Jun 21 06:37:27 2013
@@ -527,7 +527,11 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -551,6 +555,7 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/data15bytes</expected-output>
</comparator>
<comparator>
@@ -564,6 +569,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>DATAFILE120_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/data120bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^30( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/data30bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^60( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/data60bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^120( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/data120bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -607,7 +627,11 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -709,7 +733,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^NAMENODE/user/[a-zA-Z0-9]*/dir0( |\t)*DATAFILE_TOTAL_2xSIZE</expected-output>
+=======
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0( |\t)*450</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1318,7 +1346,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
+=======
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1370,6 +1402,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
</comparator>
<comparator>
@@ -1383,6 +1416,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/file3</expected-output>
+=======
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file1</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file2</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file3</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1483,7 +1531,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
+=======
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1517,7 +1569,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/dir0</expected-output>
+=======
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1569,6 +1625,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/dir0</expected-output>
</comparator>
<comparator>
@@ -1582,6 +1639,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>^Deleted NAMENODE/user/[a-zA-Z0-9]*/dir3</expected-output>
+=======
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir1</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir2</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Deleted hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir3</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1659,7 +1731,11 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1705,6 +1781,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/dir1/data/data15bytes</expected-output>
</comparator>
<comparator>
@@ -1718,6 +1795,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>DATAFILE120_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/dir1/data/data120bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data15bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^30( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data30bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^60( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data60bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^120( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data120bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1765,11 +1857,19 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
<expected-output>DATAFILE30_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/data30bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^30( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/data30bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1872,7 +1972,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^put: `wrongdir': specified destination directory doest not exist</expected-output>
+ <expected-output>^put: `wrongdir': specified destination directory does not exist</expected-output>
</comparator>
</comparators>
</test>
@@ -1915,7 +2015,11 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/data15bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -1961,6 +2065,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/dir1/data/data15bytes</expected-output>
</comparator>
<comparator>
@@ -1974,6 +2079,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>DATAFILE120_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/dir1/data/data120bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data15bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^30( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data30bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^60( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data60bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^120( |\t)*hdfs://localhost.*:[0-9]*/user/[a-zA-Z0-9]*/dir0/dir1/data/data120bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2021,11 +2141,19 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^DATAFILE15_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
<expected-output>DATAFILE30_SIZE( |\t)*NAMENODE/user/[a-zA-Z0-9]*/dir0/data30bytes</expected-output>
+=======
+ <expected-output>^15( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/data15bytes</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^30( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/data30bytes</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2128,7 +2256,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^copyFromLocal: `wrongdir': specified destination directory doest not exist</expected-output>
+ <expected-output>^copyFromLocal: `wrongdir': specified destination directory does not exist</expected-output>
</comparator>
</comparators>
</test>
@@ -2419,7 +2547,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^NAMENODE/user/[a-zA-Z0-9]*/dir0(|\t)*0</expected-output>
+=======
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0(|\t)*0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2471,6 +2603,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^NAMENODE/user/[a-zA-Z0-9]*/dir0(|\t)*0</expected-output>
</comparator>
<comparator>
@@ -2484,6 +2617,21 @@
<comparator>
<type>RegexpComparator</type>
<expected-output>^NAMENODE/user/[a-zA-Z0-9]*/dir3(|\t)*0</expected-output>
+=======
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0(|\t)*0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir1(|\t)*0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir2(|\t)*0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir3(|\t)*0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2552,7 +2700,11 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Replication 2 set: NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
+=======
+ <expected-output>^Replication 2 set: hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2592,11 +2744,19 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^Replication 2 set: NAMENODE/user/[a-zA-Z0-9]*/dir0/file0</expected-output>
</comparator>
<comparator>
<type>RegexpComparator</type>
<expected-output>^Replication 2 set: NAMENODE/user/[a-zA-Z0-9]*/dir0/file1</expected-output>
+=======
+ <expected-output>^Replication 2 set: hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/file0</expected-output>
+ </comparator>
+ <comparator>
+ <type>RegexpComparator</type>
+ <expected-output>^Replication 2 set: hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/dir0/file1</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2672,7 +2832,11 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^0( |\t)*NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
+=======
+ <expected-output>^0( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -2693,9 +2857,15 @@
</comparator>
<comparator>
<type>RegexpComparator</type>
+<<<<<<< HEAD
<expected-output>^0( |\t)*NAMENODE/user/[a-zA-Z0-9]*/file0</expected-output>
<expected-output>^0( |\t)*NAMENODE/user/[a-zA-Z0-9]*/file1</expected-output>
<expected-output>^0( |\t)*NAMENODE/user/[a-zA-Z0-9]*/file2</expected-output>
+=======
+ <expected-output>^0( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file0</expected-output>
+ <expected-output>^0( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file1</expected-output>
+ <expected-output>^0( |\t)*hdfs://localhost[.a-z]*:[0-9]*/user/[a-zA-Z0-9]*/file2</expected-output>
+>>>>>>> branch-1
</comparator>
</comparators>
</test>
@@ -3161,7 +3331,7 @@
<comparators>
<comparator>
<type>RegexpComparator</type>
- <expected-output>^moveFromLocal: `wrongdir': specified destination directory doest not exist</expected-output>
+ <expected-output>^moveFromLocal: `wrongdir': specified destination directory does not exist</expected-output>
</comparator>
</comparators>
</test>
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xsl
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xsl?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xsl (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/cli/testConf.xsl Fri Jun 21 06:37:27 2013
@@ -1,4 +1,17 @@
<?xml version="1.0" encoding="ISO-8859-1"?>
+<!--
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfServlet.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfServlet.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfServlet.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,113 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.conf;
+
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+import java.io.StringReader;
+import java.io.StringWriter;
+import java.util.Map;
+
+import javax.xml.parsers.DocumentBuilder;
+import javax.xml.parsers.DocumentBuilderFactory;
+
+import org.junit.Test;
+import org.mortbay.util.ajax.JSON;
+import org.w3c.dom.Document;
+import org.w3c.dom.Element;
+import org.w3c.dom.Node;
+import org.w3c.dom.NodeList;
+import org.xml.sax.InputSource;
+
+/**
+ * Basic test case that the ConfServlet can write configuration
+ * to its output in XML and JSON format.
+ */
+public class TestConfServlet {
+ private static final String TEST_KEY = "testconfservlet.key";
+ private static final String TEST_VAL = "testval";
+
+ private Configuration getTestConf() {
+ Configuration testConf = new Configuration();
+ testConf.set(TEST_KEY, TEST_VAL);
+ return testConf;
+ }
+
+ @Test
+ @SuppressWarnings("unchecked")
+ public void testWriteJson() throws Exception {
+ StringWriter sw = new StringWriter();
+ ConfServlet.writeResponse(getTestConf(), sw, "json");
+ String json = sw.toString();
+ boolean foundSetting = false;
+ Object parsed = JSON.parse(json);
+ Object[] properties = ((Map<String, Object[]>)parsed).get("properties");
+ for (Object o : properties) {
+ Map<String, Object> propertyInfo = (Map<String, Object>)o;
+ String key = (String)propertyInfo.get("key");
+ String val = (String)propertyInfo.get("value");
+ String resource = (String)propertyInfo.get("resource");
+ System.err.println("k: " + key + " v: " + val + " r: " + resource);
+ if (TEST_KEY.equals(key) && TEST_VAL.equals(val)
+ && Configuration.UNKNOWN_RESOURCE.equals(resource)) {
+ foundSetting = true;
+ }
+ }
+ assertTrue(foundSetting);
+ }
+
+ @Test
+ public void testWriteXml() throws Exception {
+ StringWriter sw = new StringWriter();
+ ConfServlet.writeResponse(getTestConf(), sw, "xml");
+ String xml = sw.toString();
+
+ DocumentBuilderFactory docBuilderFactory
+ = DocumentBuilderFactory.newInstance();
+ DocumentBuilder builder = docBuilderFactory.newDocumentBuilder();
+ Document doc = builder.parse(new InputSource(new StringReader(xml)));
+ NodeList nameNodes = doc.getElementsByTagName("name");
+ boolean foundSetting = false;
+ for (int i = 0; i < nameNodes.getLength(); i++) {
+ Node nameNode = nameNodes.item(i);
+ String key = nameNode.getTextContent();
+ System.err.println("xml key: " + key);
+ if (TEST_KEY.equals(key)) {
+ foundSetting = true;
+ Element propertyElem = (Element)nameNode.getParentNode();
+ String val = propertyElem.getElementsByTagName("value").item(0).getTextContent();
+ assertEquals(TEST_VAL, val);
+ }
+ }
+ assertTrue(foundSetting);
+ }
+
+ @Test
+ public void testBadFormat() throws Exception {
+ StringWriter sw = new StringWriter();
+ try {
+ ConfServlet.writeResponse(getTestConf(), sw, "not a format");
+ fail("writeResponse with bad format didn't throw!");
+ } catch (ConfServlet.BadFormatException bfe) {
+ // expected
+ }
+ assertEquals("", sw.toString());
+ }
+}
\ No newline at end of file
Modified: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfiguration.java?rev=1495297&r1=1495296&r2=1495297&view=diff
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfiguration.java (original)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestConfiguration.java Fri Jun 21 06:37:27 2013
@@ -625,5 +625,14 @@ public class TestConfiguration extends T
assertTrue("Picked out wrong key " + key3, !res.containsKey(key3));
assertTrue("Picked out wrong key " + key4, !res.containsKey(key4));
}
+
+ public void testUnset() {
+ Configuration conf = new Configuration();
+ conf.set("foo", "bar");
+ assertNotNull(conf.get("foo"));
+ conf.unset("foo");
+ assertNull(conf.get("foo"));
+ }
+
}
Added: hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestGetInstances.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestGetInstances.java?rev=1495297&view=auto
==============================================================================
--- hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestGetInstances.java (added)
+++ hadoop/common/branches/branch-1-win/src/test/org/apache/hadoop/conf/TestGetInstances.java Fri Jun 21 06:37:27 2013
@@ -0,0 +1,74 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.conf;
+
+import java.util.List;
+
+import junit.framework.TestCase;
+
+public class TestGetInstances extends TestCase {
+
+ interface SampleInterface {}
+
+ interface ChildInterface extends SampleInterface {}
+
+ static class SampleClass implements SampleInterface {
+ SampleClass() {}
+ }
+
+ static class AnotherClass implements ChildInterface {
+ AnotherClass() {}
+ }
+
+ /**
+ * Makes sure <code>Configuration.getInstances()</code> returns
+ * instances of the required type.
+ */
+ public void testGetInstances() throws Exception {
+ Configuration conf = new Configuration();
+
+ List<SampleInterface> classes =
+ conf.getInstances("no.such.property", SampleInterface.class);
+ assertTrue(classes.isEmpty());
+
+ conf.set("empty.property", "");
+ classes = conf.getInstances("empty.property", SampleInterface.class);
+ assertTrue(classes.isEmpty());
+
+ conf.setStrings("some.classes",
+ SampleClass.class.getName(), AnotherClass.class.getName());
+ classes = conf.getInstances("some.classes", SampleInterface.class);
+ assertEquals(2, classes.size());
+
+ try {
+ conf.setStrings("some.classes",
+ SampleClass.class.getName(), AnotherClass.class.getName(),
+ String.class.getName());
+ conf.getInstances("some.classes", SampleInterface.class);
+ fail("java.lang.String does not implement SampleInterface");
+ } catch (RuntimeException e) {}
+
+ try {
+ conf.setStrings("some.classes",
+ SampleClass.class.getName(), AnotherClass.class.getName(),
+ "no.such.Class");
+ conf.getInstances("some.classes", SampleInterface.class);
+ fail("no.such.Class does not exist");
+ } catch (RuntimeException e) {}
+ }
+}