You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by aw...@apache.org on 2015/07/31 23:35:05 UTC

[1/2] hadoop git commit: HADOOP-12249. pull argument parsing into a function (aw)

Repository: hadoop
Updated Branches:
  refs/heads/trunk d0e0ba801 -> a890a3152


HADOOP-12249. pull argument parsing into a function (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/666cafca
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/666cafca
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/666cafca

Branch: refs/heads/trunk
Commit: 666cafca8d3c928f3470a03ae9dedb27e27f8f0e
Parents: d0e0ba8
Author: Allen Wittenauer <aw...@apache.org>
Authored: Fri Jul 31 14:32:21 2015 -0700
Committer: Allen Wittenauer <aw...@apache.org>
Committed: Fri Jul 31 14:32:21 2015 -0700

----------------------------------------------------------------------
 hadoop-common-project/hadoop-common/CHANGES.txt |   2 +
 .../hadoop-common/src/main/bin/hadoop           |   8 +-
 .../hadoop-common/src/main/bin/hadoop-config.sh |  73 +-----
 .../src/main/bin/hadoop-functions.sh            | 255 ++++++++++++++++---
 .../hadoop-common/src/main/bin/slaves.sh        |   3 +-
 .../hadoop-kms/src/main/sbin/kms.sh             |  29 +--
 .../hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh  |  29 +--
 .../hadoop-hdfs/src/main/bin/hdfs               |  10 +-
 .../hadoop-hdfs/src/main/bin/start-balancer.sh  |  12 +-
 .../hadoop-hdfs/src/main/bin/stop-balancer.sh   |  10 +-
 hadoop-mapreduce-project/bin/mapred             |   2 +-
 .../hadoop-yarn/bin/start-yarn.sh               |   7 +-
 .../hadoop-yarn/bin/stop-yarn.sh                |   7 +-
 hadoop-yarn-project/hadoop-yarn/bin/yarn        |   7 +
 14 files changed, 296 insertions(+), 158 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 3c7e5c3..8d0795b 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -234,6 +234,8 @@ Trunk (Unreleased)
 
     HADOOP-10979. Auto-entries in hadoop_usage (aw)
 
+    HADOOP-12249. pull argument parsing into a function (aw)
+
   BUG FIXES
 
     HADOOP-11473. test-patch says "-1 overall" even when all checks are +1

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-common/src/main/bin/hadoop
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
index c5444d6..ef67cc5 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop
@@ -17,8 +17,14 @@
 
 MYNAME="${BASH_SOURCE-$0}"
 
-function hadoop_usage()
+function hadoop_usage
 {
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in slave mode"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+  hadoop_add_option "--hosts filename" "list of hosts to use in slave mode"
+  hadoop_add_option "--slaves" "turn on slave mode"
+
   hadoop_add_subcommand "archive" "create a Hadoop archive"
   hadoop_add_subcommand "checknative" "check native Hadoop and compression libraries availability"
   hadoop_add_subcommand "classpath" "prints the class path needed to get the Hadoop jar and the required libraries"

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
old mode 100644
new mode 100755
index 58b871e..0b52895
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-config.sh
@@ -53,7 +53,7 @@ if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
 fi
 
 # get our functions defined for usage later
-if [[ -n "${HADOOP_COMMON_HOME}" ]] && 
+if [[ -n "${HADOOP_COMMON_HOME}" ]] &&
    [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-functions.sh" ]]; then
   . "${HADOOP_COMMON_HOME}/libexec/hadoop-functions.sh"
 elif [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-functions.sh" ]]; then
@@ -93,75 +93,8 @@ hadoop_bootstrap
 # shellcheck disable=SC2034
 HADOOP_USER_PARAMS=("$@")
 
-HADOOP_DAEMON_MODE="default"
-
-while [[ -z "${_hadoop_common_done}" ]]; do
-  case $1 in
-    --buildpaths)
-      # shellcheck disable=SC2034
-      HADOOP_ENABLE_BUILD_PATHS=true
-      shift
-    ;;
-    --config)
-      shift
-      confdir=$1
-      shift
-      if [[ -d "${confdir}" ]]; then
-        # shellcheck disable=SC2034
-        HADOOP_CONF_DIR="${confdir}"
-      elif [[ -z "${confdir}" ]]; then
-        hadoop_error "ERROR: No parameter provided for --config "
-        hadoop_exit_with_usage 1
-      else
-        hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
-        hadoop_exit_with_usage 1
-      fi
-    ;;
-    --daemon)
-      shift
-      HADOOP_DAEMON_MODE=$1
-      shift
-      if [[ -z "${HADOOP_DAEMON_MODE}" || \
-        ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
-        hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
-        hadoop_exit_with_usage 1
-      fi
-    ;;
-    --debug)
-      shift
-      # shellcheck disable=SC2034
-      HADOOP_SHELL_SCRIPT_DEBUG=true
-    ;; 
-    --help|-help|-h|help|--h|--\?|-\?|\?)
-      hadoop_exit_with_usage 0
-    ;;
-    --hostnames)
-      shift
-      # shellcheck disable=SC2034
-      HADOOP_SLAVE_NAMES="$1"
-      shift
-    ;;
-    --hosts)
-      shift
-      hadoop_populate_slaves_file "$1"
-      shift
-    ;;
-    --loglevel)
-      shift
-      # shellcheck disable=SC2034
-      HADOOP_LOGLEVEL="$1"
-      shift
-    ;;
-    --slaves)
-      shift
-      # shellcheck disable=SC2034
-      HADOOP_SLAVE_MODE=true
-    ;;
-    *)
-      _hadoop_common_done=true
-    ;;
-  esac
-done
+hadoop_parse_args "$@"
+shift "${HADOOP_PARSE_COUNTER}"
 
 #
 # Setup the base-line environment

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 6ebbee1..5e2a2e8 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -16,7 +16,8 @@
 
 # we need to declare this globally as an array, which can only
 # be done outside of a function
-declare -a HADOOP_USAGE=()
+declare -a HADOOP_SUBCMD_USAGE
+declare -a HADOOP_OPTION_USAGE
 
 ## @description  Print a message to stderr
 ## @audience     public
@@ -48,53 +49,72 @@ function hadoop_debug
 ## @param        subcommanddesc
 function hadoop_add_subcommand
 {
+  local subcmd=$1
+  local text=$2
+
+  HADOOP_SUBCMD_USAGE[${HADOOP_SUBCMD_USAGE_COUNTER}]="${subcmd}@${text}"
+  ((HADOOP_SUBCMD_USAGE_COUNTER=HADOOP_SUBCMD_USAGE_COUNTER+1))
+}
+
+## @description  Add an option to the usage output
+## @audience     private
+## @stability    evolving
+## @replaceable  no
+## @param        subcommand
+## @param        subcommanddesc
+function hadoop_add_option
+{
   local option=$1
   local text=$2
 
-  HADOOP_USAGE[${HADOOP_USAGE_COUNTER}]="${option}@${text}"
-  ((HADOOP_USAGE_COUNTER=HADOOP_USAGE_COUNTER+1))
+  HADOOP_OPTION_USAGE[${HADOOP_OPTION_USAGE_COUNTER}]="${option}@${text}"
+  ((HADOOP_OPTION_USAGE_COUNTER=HADOOP_OPTION_USAGE_COUNTER+1))
 }
 
-## @description  generate standard usage output
-## @description  and optionally takes a class
+## @description  Reset the usage information to blank
 ## @audience     private
 ## @stability    evolving
 ## @replaceable  no
-## @param        execname
-## @param        [true|false]
-function hadoop_generate_usage
+function hadoop_reset_usage
 {
-  local cmd=$1
-  local takesclass=$2
-  local i
-  local counter
-  local line
-  local option
-  local giventext
-  local maxoptsize
-  local foldsize=75
+  HADOOP_SUBCMD_USAGE=()
+  HADOOP_OPTION_USAGE=()
+  HADOOP_SUBCMD_USAGE_COUNTER=0
+  HADOOP_OPTION_USAGE_COUNTER=0
+}
+
+## @description  Print a screen-size aware two-column output
+## @audience     private
+## @stability    evolving
+## @replaceable  no
+## @param        array
+function hadoop_generic_columnprinter
+{
+  declare -a input=("$@")
+  declare -i i=0
+  declare -i counter=0
+  declare line
+  declare text
+  declare option
+  declare giventext
+  declare -i maxoptsize
+  declare -i foldsize
   declare -a tmpa
+  declare numcols
 
-  cmd=${cmd##*/}
+  if [[ -n "${COLUMNS}" ]]; then
+    numcols=${COLUMNS}
+  else
+    numcols=$(tput cols 2>/dev/null)
+  fi
 
-  echo "Usage: ${cmd} [OPTIONS] SUBCOMMAND [SUBCOMMAND OPTIONS]"
-  if [[ ${takesclass} = true ]]; then
-    echo " or    ${cmd} [OPTIONS] CLASSNAME [CLASSNAME OPTIONS]"
-    echo "  where CLASSNAME is a user-provided Java class"
+  if [[ -z "${numcols}"
+     || ! "${numcols}" =~ ^[0-9]+$ ]]; then
+    numcols=75
+  else
+    ((numcols=numcols-5))
   fi
-  echo ""
-  echo "  OPTIONS is none or any of:"
-  echo "     --config confdir"
-  echo "     --daemon (start|stop|status)"
-  echo "     --debug"
-  echo "     --hostnames list[,of,host,names]"
-  echo "     --hosts filename"
-  echo "     --loglevel loglevel"
-  echo "     --slaves"
-  echo ""
-  echo "  SUBCOMMAND is one of:"
-
-  counter=0
+
   while read -r line; do
     tmpa[${counter}]=${line}
     ((counter=counter+1))
@@ -102,12 +122,12 @@ function hadoop_generate_usage
     if [[ ${#option} -gt ${maxoptsize} ]]; then
       maxoptsize=${#option}
     fi
-  done < <(for i in "${HADOOP_USAGE[@]}"; do
-    echo "${i}"
+  done < <(for text in "${input[@]}"; do
+    echo "${text}"
   done | sort)
 
   i=0
-  ((foldsize=75-maxoptsize))
+  ((foldsize=numcols-maxoptsize))
 
   until [[ $i -eq ${#tmpa[@]} ]]; do
     option=$(echo "${tmpa[$i]}" | cut -f1 -d'@')
@@ -119,8 +139,63 @@ function hadoop_generate_usage
     done < <(echo "${giventext}"| fold -s -w ${foldsize})
     ((i=i+1))
   done
-  echo ""
-  echo "Most subcommands print help when invoked w/o parameters or with -h."
+}
+
+## @description  generate standard usage output
+## @description  and optionally takes a class
+## @audience     private
+## @stability    evolving
+## @replaceable  no
+## @param        execname
+## @param        true|false
+## @param        [text to use in place of SUBCOMMAND]
+function hadoop_generate_usage
+{
+  local cmd=$1
+  local takesclass=$2
+  local subcmdtext=${3:-"SUBCOMMAND"}
+  local haveoptions
+  local optstring
+  local havesubs
+  local subcmdstring
+
+  cmd=${cmd##*/}
+
+  if [[ -n "${HADOOP_OPTION_USAGE_COUNTER}"
+        && "${HADOOP_OPTION_USAGE_COUNTER}" -gt 0 ]]; then
+    haveoptions=true
+    optstring=" [OPTIONS]"
+  fi
+
+  if [[ -n "${HADOOP_SUBCMD_USAGE_COUNTER}"
+        && "${HADOOP_SUBCMD_USAGE_COUNTER}" -gt 0 ]]; then
+    havesubs=true
+    subcmdstring=" ${subcmdtext} [${subcmdtext} OPTIONS]"
+  fi
+
+  echo "Usage: ${cmd}${optstring}${subcmdstring}"
+  if [[ ${takesclass} = true ]]; then
+    echo " or    ${cmd}${optstring} CLASSNAME [CLASSNAME OPTIONS]"
+    echo "  where CLASSNAME is a user-provided Java class"
+  fi
+
+  if [[ "${haveoptions}" = true ]]; then
+    echo ""
+    echo "  OPTIONS is none or any of:"
+    echo ""
+
+    hadoop_generic_columnprinter "${HADOOP_OPTION_USAGE[@]}"
+  fi
+
+  if [[ "${havesubs}" = true ]]; then
+    echo ""
+    echo "  ${subcmdtext} is one of:"
+    echo ""
+
+    hadoop_generic_columnprinter "${HADOOP_SUBCMD_USAGE[@]}"
+    echo ""
+    echo "${subcmdtext} may print help when invoked w/o parameters or with -h."
+  fi
 }
 
 ## @description  Replace `oldvar` with `newvar` if `oldvar` exists.
@@ -189,7 +264,7 @@ function hadoop_bootstrap
   TOOL_PATH=${TOOL_PATH:-${HADOOP_PREFIX}/share/hadoop/tools/lib/*}
 
   # usage output set to zero
-  HADOOP_USAGE_COUNTER=0
+  hadoop_reset_usage
 
   export HADOOP_OS_TYPE=${HADOOP_OS_TYPE:-$(uname -s)}
 
@@ -1730,3 +1805,101 @@ function hadoop_do_classpath_subcommand
     exit 0
   fi
 }
+
+## @description  generic shell script option parser.  sets
+## @description  HADOOP_PARSE_COUNTER to the number of arguments
+## @description  the caller should shift
+## @audience     private
+## @stability    evolving
+## @replaceable  yes
+## @param        [parameters, typically "$@"]
+function hadoop_parse_args
+{
+  HADOOP_DAEMON_MODE="default"
+  HADOOP_PARSE_COUNTER=0
+
+  # not all of the options supported here are supported by all commands
+  # however these are:
+  hadoop_add_option "--config dir" "Hadoop config directory"
+  hadoop_add_option "--debug" "turn on shell script debug mode"
+  hadoop_add_option "--help" "usage information"
+
+  while true; do
+    hadoop_debug "hadoop_parse_args: processing $1"
+    case $1 in
+      --buildpaths)
+        # shellcheck disable=SC2034
+        HADOOP_ENABLE_BUILD_PATHS=true
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
+      ;;
+      --config)
+        shift
+        confdir=$1
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
+        if [[ -d "${confdir}" ]]; then
+          # shellcheck disable=SC2034
+          HADOOP_CONF_DIR="${confdir}"
+        elif [[ -z "${confdir}" ]]; then
+          hadoop_error "ERROR: No parameter provided for --config "
+          hadoop_exit_with_usage 1
+        else
+          hadoop_error "ERROR: Cannot find configuration directory \"${confdir}\""
+          hadoop_exit_with_usage 1
+        fi
+      ;;
+      --daemon)
+        shift
+        HADOOP_DAEMON_MODE=$1
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
+        if [[ -z "${HADOOP_DAEMON_MODE}" || \
+          ! "${HADOOP_DAEMON_MODE}" =~ ^st(art|op|atus)$ ]]; then
+          hadoop_error "ERROR: --daemon must be followed by either \"start\", \"stop\", or \"status\"."
+          hadoop_exit_with_usage 1
+        fi
+      ;;
+      --debug)
+        shift
+        # shellcheck disable=SC2034
+        HADOOP_SHELL_SCRIPT_DEBUG=true
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
+      ;;
+      --help|-help|-h|help|--h|--\?|-\?|\?)
+        hadoop_exit_with_usage 0
+      ;;
+      --hostnames)
+        shift
+        # shellcheck disable=SC2034
+        HADOOP_SLAVE_NAMES="$1"
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
+      ;;
+      --hosts)
+        shift
+        hadoop_populate_slaves_file "$1"
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
+      ;;
+      --loglevel)
+        shift
+        # shellcheck disable=SC2034
+        HADOOP_LOGLEVEL="$1"
+        shift
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+2))
+      ;;
+      --slaves)
+        shift
+        # shellcheck disable=SC2034
+        HADOOP_SLAVE_MODE=true
+        ((HADOOP_PARSE_COUNTER=HADOOP_PARSE_COUNTER+1))
+      ;;
+      *)
+        break
+      ;;
+    esac
+  done
+
+  hadoop_debug "hadoop_parse: asking caller to skip ${HADOOP_PARSE_COUNTER}"
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
index a8f0660..2fdf18b 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/slaves.sh
@@ -27,7 +27,8 @@
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
 
-function hadoop_usage {
+function hadoop_usage
+{
   echo "Usage: slaves.sh [--config confdir] command..."
 }
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
old mode 100644
new mode 100755
index 9228d2d..1191eb9
--- a/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
+++ b/hadoop-common-project/hadoop-kms/src/main/sbin/kms.sh
@@ -13,28 +13,27 @@
 #  limitations under the License.
 #
 
-function hadoop_usage()
+MYNAME="${BASH_SOURCE-$0}"
+
+function hadoop_usage
 {
-  echo "Usage: kms.sh [--config confdir] [--debug] --daemon start|status|stop"
-  echo "       kms.sh [--config confdir] [--debug] COMMAND"
-  echo "            where COMMAND is one of:"
-  echo "  run               Start kms in the current window"
-  echo "  run -security     Start in the current window with security manager"
-  echo "  start             Start kms in a separate window"
-  echo "  start -security   Start in a separate window with security manager"
-  echo "  status            Return the LSB compliant status"
-  echo "  stop              Stop kms, waiting up to 5 seconds for the process to end"
-  echo "  stop n            Stop kms, waiting up to n seconds for the process to end"
-  echo "  stop -force       Stop kms, wait up to 5 seconds and then use kill -KILL if still running"
-  echo "  stop n -force     Stop kms, wait up to n seconds and then use kill -KILL if still running"
+  hadoop_add_subcommand "run" "Start kms in the current window"
+  hadoop_add_subcommand "run -security" "Start in the current window with security manager"
+  hadoop_add_subcommand "start" "Start kms in a separate window"
+  hadoop_add_subcommand "start -security" "Start in a separate window with security manager"
+  hadoop_add_subcommand "status" "Return the LSB compliant status"
+  hadoop_add_subcommand "stop" "Stop kms, waiting up to 5 seconds for the process to end"
+  hadoop_add_subcommand "stop n" "Stop kms, waiting up to n seconds for the process to end"
+  hadoop_add_subcommand "stop -force" "Stop kms, wait up to 5 seconds and then use kill -KILL if still running"
+  hadoop_add_subcommand "stop n -force" "Stop kms, wait up to n seconds and then use kill -KILL if still running"
+  hadoop_generate_usage "${MYNAME}" false
 }
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then
   DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
 else
-  this="${BASH_SOURCE-$0}"
-  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
old mode 100644
new mode 100755
index f51a5e6..9b819aa
--- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/main/sbin/httpfs.sh
@@ -13,28 +13,27 @@
 #  limitations under the License.
 #
 
-function hadoop_usage()
+MYNAME="${BASH_SOURCE-$0}"
+
+function hadoop_usage
 {
-  echo "Usage: httpfs.sh [--config confdir] [--debug] --daemon start|status|stop"
-  echo "       httpfs.sh [--config confdir] [--debug] COMMAND"
-  echo "            where COMMAND is one of:"
-  echo "  run               Start httpfs in the current window"
-  echo "  run -security     Start in the current window with security manager"
-  echo "  start             Start httpfs in a separate window"
-  echo "  start -security   Start in a separate window with security manager"
-  echo "  status            Return the LSB compliant status"
-  echo "  stop              Stop httpfs, waiting up to 5 seconds for the process to end"
-  echo "  stop n            Stop httpfs, waiting up to n seconds for the process to end"
-  echo "  stop -force       Stop httpfs, wait up to 5 seconds and then use kill -KILL if still running"
-  echo "  stop n -force     Stop httpfs, wait up to n seconds and then use kill -KILL if still running"
+  hadoop_add_subcommand "run" "Start httpfs in the current window"
+  hadoop_add_subcommand "run -security" "Start in the current window with security manager"
+  hadoop_add_subcommand "start" "Start httpfs in a separate window"
+  hadoop_add_subcommand "start -security" "Start in a separate window with security manager"
+  hadoop_add_subcommand "status" "Return the LSB compliant status"
+  hadoop_add_subcommand "stop" "Stop httpfs, waiting up to 5 seconds for the process to end"
+  hadoop_add_subcommand "stop n" "Stop httpfs, waiting up to n seconds for the process to end"
+  hadoop_add_subcommand "stop -force" "Stop httpfs, wait up to 5 seconds and then use kill -KILL if still running"
+  hadoop_add_subcommand "stop n -force" "Stop httpfs, wait up to n seconds and then use kill -KILL if still running"
+  hadoop_generate_usage "${MYNAME}" false
 }
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then
   DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
 else
-  this="${BASH_SOURCE-$0}"
-  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
   DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
 fi
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
index 23a08be..852b040 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/hdfs
@@ -19,6 +19,13 @@ MYNAME="${BASH_SOURCE-$0}"
 
 function hadoop_usage
 {
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
+  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in slave mode"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+  hadoop_add_option "--hosts filename" "list of hosts to use in slave mode"
+  hadoop_add_option "--slaves" "turn on slave mode"
+
   hadoop_add_subcommand "balancer" "run a cluster balancing utility"
   hadoop_add_subcommand "cacheadmin" "configure the HDFS cache"
   hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"
@@ -47,8 +54,7 @@ function hadoop_usage
   hadoop_add_subcommand "storagepolicies" "list/get/set block storage policies"
   hadoop_add_subcommand "version" "print the version"
   hadoop_add_subcommand "zkfc" "run the ZK Failover Controller daemon"
-  hadoop_generate_usage "${MYNAME}"
-
+  hadoop_generate_usage "${MYNAME}" false
 }
 
 # let's locate libexec...

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
index 321f9c9..cbf6170 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/start-balancer.sh
@@ -15,13 +15,19 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+MYNAME="${BASH_SOURCE-$0}"
+
 function hadoop_usage
 {
-  echo "Usage: start-balancer.sh [--config confdir]  [-policy <policy>] [-threshold <threshold>]"
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+
+  hadoop_add_option "-policy <policy>" "set the balancer's policy"
+  hadoop_add_option "-threshold <threshold>" "set the threshold for balancing"
+  hadoop_generate_usage "${MYNAME}" false
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
----------------------------------------------------------------------
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
index da25d46..268cf90 100755
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/bin/stop-balancer.sh
@@ -15,13 +15,17 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
+MYNAME="${BASH_SOURCE-$0}"
+
 function hadoop_usage
 {
-  echo "Usage: stop-balancer.sh [--config confdir]"
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+
+  hadoop_generate_usage "${MYNAME}" false
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-mapreduce-project/bin/mapred
----------------------------------------------------------------------
diff --git a/hadoop-mapreduce-project/bin/mapred b/hadoop-mapreduce-project/bin/mapred
index 8c16369..426af80 100755
--- a/hadoop-mapreduce-project/bin/mapred
+++ b/hadoop-mapreduce-project/bin/mapred
@@ -29,7 +29,7 @@ function hadoop_usage
   hadoop_add_subcommand "queue" "get information regarding JobQueues"
   hadoop_add_subcommand "sampler" "sampler"
   hadoop_add_subcommand "version" "print the version"
-  hadoop_generate_usage "${MYNAME}"
+  hadoop_generate_usage "${MYNAME}" true
 }
 
 bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
old mode 100644
new mode 100755
index ac18089..1172c60
--- a/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/start-yarn.sh
@@ -16,13 +16,14 @@
 # limitations under the License.
 
 
+MYNAME="${BASH_SOURCE-$0}"
+
 function hadoop_usage
 {
-  echo "Usage: start-yarn.sh [--config confdir]"
+  hadoop_generate_usage "${MYNAME}" false
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
old mode 100644
new mode 100755
index d85b44e..ffa4cfc
--- a/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
+++ b/hadoop-yarn-project/hadoop-yarn/bin/stop-yarn.sh
@@ -16,13 +16,14 @@
 # limitations under the License.
 
 
+MYNAME="${BASH_SOURCE-$0}"
+
 function hadoop_usage
 {
-  echo "Usage: stop-yarn.sh [--config confdir]"
+  hadoop_generate_usage "${MYNAME}" false
 }
 
-this="${BASH_SOURCE-$0}"
-bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+bin=$(cd -P -- "$(dirname -- "${MYNAME}")" >/dev/null && pwd -P)
 
 # let's locate libexec...
 if [[ -n "${HADOOP_PREFIX}" ]]; then

http://git-wip-us.apache.org/repos/asf/hadoop/blob/666cafca/hadoop-yarn-project/hadoop-yarn/bin/yarn
----------------------------------------------------------------------
diff --git a/hadoop-yarn-project/hadoop-yarn/bin/yarn b/hadoop-yarn-project/hadoop-yarn/bin/yarn
index 50607c8..f0bed9b 100755
--- a/hadoop-yarn-project/hadoop-yarn/bin/yarn
+++ b/hadoop-yarn-project/hadoop-yarn/bin/yarn
@@ -19,6 +19,13 @@ MYNAME="${BASH_SOURCE-$0}"
 
 function hadoop_usage
 {
+  hadoop_add_option "--buildpaths" "attempt to add class files from build tree"
+  hadoop_add_option "--daemon (start|status|stop)" "operate on a daemon"
+  hadoop_add_option "--hostnames list[,of,host,names]" "hosts to use in slave mode"
+  hadoop_add_option "--loglevel level" "set the log4j level for this command"
+  hadoop_add_option "--hosts filename" "list of hosts to use in slave mode"
+  hadoop_add_option "--slaves" "turn on slave mode"
+
   hadoop_add_subcommand "application" "prints application(s) report/kill application"
   hadoop_add_subcommand "applicationattempt" "prints applicationattempt(s) report"
   hadoop_add_subcommand "classpath" "prints the class path needed to get the hadoop jar and the required libraries"


[2/2] hadoop git commit: HADOOP-10854. unit tests for the shell scripts (aw)

Posted by aw...@apache.org.
HADOOP-10854. unit tests for the shell scripts (aw)


Project: http://git-wip-us.apache.org/repos/asf/hadoop/repo
Commit: http://git-wip-us.apache.org/repos/asf/hadoop/commit/a890a315
Tree: http://git-wip-us.apache.org/repos/asf/hadoop/tree/a890a315
Diff: http://git-wip-us.apache.org/repos/asf/hadoop/diff/a890a315

Branch: refs/heads/trunk
Commit: a890a31529cc625326cd3749a4960ad7c02fc6fe
Parents: 666cafc
Author: Allen Wittenauer <aw...@apache.org>
Authored: Fri Jul 31 14:34:48 2015 -0700
Committer: Allen Wittenauer <aw...@apache.org>
Committed: Fri Jul 31 14:34:48 2015 -0700

----------------------------------------------------------------------
 BUILDING.txt                                    |   4 +-
 dev-support/docker/Dockerfile                   |   8 +
 hadoop-common-project/hadoop-common/CHANGES.txt |   2 +
 hadoop-common-project/hadoop-common/pom.xml     |  33 +++
 .../src/main/bin/hadoop-functions.sh            | 114 +++++++---
 .../scripts/hadoop-functions_test_helper.bash   |  56 +++++
 .../src/test/scripts/hadoop_add_classpath.bats  | 100 +++++++++
 .../src/test/scripts/hadoop_add_colonpath.bats  |  96 +++++++++
 .../scripts/hadoop_add_common_to_classpath.bats |  71 +++++++
 .../test/scripts/hadoop_add_javalibpath.bats    |  98 +++++++++
 .../src/test/scripts/hadoop_add_ldlibpath.bats  |  97 +++++++++
 .../src/test/scripts/hadoop_add_param.bats      |  49 +++++
 .../hadoop_add_to_classpath_userpath.bats       |  98 +++++++++
 .../src/test/scripts/hadoop_basic_init.bats     |  94 +++++++++
 .../src/test/scripts/hadoop_bootstrap.bats      |  51 +++++
 .../src/test/scripts/hadoop_confdir.bats        |  92 +++++++++
 .../test/scripts/hadoop_deprecate_envvar.bats   |  32 +++
 .../src/test/scripts/hadoop_finalize.bats       | 206 +++++++++++++++++++
 .../scripts/hadoop_finalize_catalina_opts.bats  |  56 +++++
 .../test/scripts/hadoop_finalize_classpath.bats |  64 ++++++
 .../scripts/hadoop_finalize_hadoop_heap.bats    |  87 ++++++++
 .../scripts/hadoop_finalize_hadoop_opts.bats    |  52 +++++
 .../test/scripts/hadoop_finalize_libpaths.bats  |  30 +++
 .../src/test/scripts/hadoop_java_setup.bats     |  47 +++++
 .../src/test/scripts/hadoop_os_tricks.bats      |  34 +++
 .../src/test/scripts/hadoop_rotate_log.bats     |  52 +++++
 .../src/test/scripts/hadoop_shellprofile.bats   |  91 ++++++++
 .../src/test/scripts/hadoop_slaves.bats         |  37 ++++
 .../src/test/scripts/hadoop_ssh.bats            |  51 +++++
 .../scripts/hadoop_translate_cygwin_path.bats   |  48 +++++
 .../test/scripts/hadoop_validate_classname.bats |  26 +++
 .../hadoop-common/src/test/scripts/run-bats.sh  |  43 ++++
 32 files changed, 1988 insertions(+), 31 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/BUILDING.txt
----------------------------------------------------------------------
diff --git a/BUILDING.txt b/BUILDING.txt
index 2aeade4..ee6e680 100644
--- a/BUILDING.txt
+++ b/BUILDING.txt
@@ -14,6 +14,8 @@ Requirements:
 * Jansson C XML parsing library ( if compiling libwebhdfs )
 * Linux FUSE (Filesystem in Userspace) version 2.6 or above ( if compiling fuse_dfs )
 * Internet connection for first build (to fetch all Maven and Hadoop dependencies)
+* python (for releasedocs)
+* bats (for shell code testing)
 
 ----------------------------------------------------------------------------------
 The easiest way to get an environment with all the appropriate tools is by means
@@ -106,7 +108,7 @@ Maven build goals:
 
  * Clean                     : mvn clean [-Preleasedocs]
  * Compile                   : mvn compile [-Pnative]
- * Run tests                 : mvn test [-Pnative]
+ * Run tests                 : mvn test [-Pnative] [-Pshelltest]
  * Create JAR                : mvn package
  * Run findbugs              : mvn compile findbugs:findbugs
  * Run checkstyle            : mvn compile checkstyle:checkstyle

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/dev-support/docker/Dockerfile
----------------------------------------------------------------------
diff --git a/dev-support/docker/Dockerfile b/dev-support/docker/Dockerfile
index f761f8b..c8453cc 100644
--- a/dev-support/docker/Dockerfile
+++ b/dev-support/docker/Dockerfile
@@ -63,6 +63,14 @@ ENV FINDBUGS_HOME /opt/findbugs
 RUN apt-get install -y cabal-install
 RUN cabal update && cabal install shellcheck --global
 
+#####
+# bats
+#####
+
+RUN add-apt-repository ppa:duggan/bats --yes
+RUN apt-get update -qq
+RUN apt-get install -qq bats
+
 # Fixing the Apache commons / Maven dependency problem under Ubuntu:
 # See http://wiki.apache.org/commons/VfsProblems
 RUN cd /usr/share/maven/lib && ln -s ../../java/commons-lang.jar .

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/CHANGES.txt
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 8d0795b..5020e91 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -51,6 +51,8 @@ Trunk (Unreleased)
     HADOOP-7947. Validate XMLs if a relevant tool is available, when using
     scripts (Kengo Seki via aw)
 
+    HADOOP-10854. unit tests for the shell scripts (aw)
+
   IMPROVEMENTS
 
     HADOOP-11203. Allow ditscp to accept bandwitdh in fraction MegaBytes

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/pom.xml
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 6b1388a..282735d 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -958,6 +958,39 @@
       </build>
     </profile>
 
+    <!-- profile to test shell code -->
+    <profile>
+      <id>shelltest</id>
+      <activation>
+        <activeByDefault>true</activeByDefault>
+      </activation>
+      <build>
+        <plugins>
+          <plugin>
+            <artifactId>maven-antrun-plugin</artifactId>
+            <executions>
+                <execution>
+                    <id>common-test-bats-driver</id>
+                    <phase>process-test-classes</phase>
+                    <goals>
+                        <goal>run</goal>
+                    </goals>
+                    <configuration>
+                      <target>
+                          <exec dir="src/test/scripts"
+                           executable="bash"
+                           failonerror="true">
+                           <arg value="./run-bats.sh" />
+                         </exec>
+                      </target>
+                    </configuration>
+                </execution>
+            </executions>
+          </plugin>
+        </plugins>
+      </build>
+    </profile>
+
   </profiles>
 </project>
 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
index 5e2a2e8..b9b7919 100755
--- a/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
+++ b/hadoop-common-project/hadoop-common/src/main/bin/hadoop-functions.sh
@@ -358,6 +358,7 @@ function hadoop_import_shellprofiles
 
   if [[ -d "${HADOOP_LIBEXEC_DIR}/shellprofile.d" ]]; then
     files1=(${HADOOP_LIBEXEC_DIR}/shellprofile.d/*.sh)
+    hadoop_debug "shellprofiles: ${files1[*]}"
   else
     hadoop_error "WARNING: ${HADOOP_LIBEXEC_DIR}/shellprofile.d doesn't exist. Functionality may not work."
   fi
@@ -368,7 +369,8 @@ function hadoop_import_shellprofiles
 
   for i in "${files1[@]}" "${files2[@]}"
   do
-    if [[ -n "${i}" ]]; then
+    if [[ -n "${i}"
+      && -f "${i}" ]]; then
       hadoop_debug "Profiles: importing ${i}"
       . "${i}"
     fi
@@ -490,6 +492,26 @@ function hadoop_basic_init
     export HADOOP_MAPRED_HOME="${HADOOP_PREFIX}"
   fi
 
+  if [[ ! -d "${HADOOP_COMMON_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_COMMON_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_HDFS_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_HDFS_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_YARN_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_YARN_HOME"
+    exit 1
+  fi
+
+  if [[ ! -d "${HADOOP_MAPRED_HOME}" ]]; then
+    hadoop_error "ERROR: Invalid HADOOP_MAPRED_HOME"
+    exit 1
+  fi
+
   HADOOP_IDENT_STRING=${HADOOP_IDENT_STRING:-$USER}
   HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-"${HADOOP_PREFIX}/logs"}
   HADOOP_LOGFILE=${HADOOP_LOGFILE:-hadoop.log}
@@ -670,7 +692,7 @@ function hadoop_common_slave_mode_execute
   # to prevent loops
   # Also remove --hostnames and --hosts along with arg values
   local argsSize=${#argv[@]};
-  for (( i = 0; i < $argsSize; i++ ))
+  for (( i = 0; i < argsSize; i++ ))
   do
     if [[ "${argv[$i]}" =~ ^--slaves$ ]]; then
       unset argv[$i]
@@ -681,6 +703,10 @@ function hadoop_common_slave_mode_execute
       unset argv[$i];
     fi
   done
+  if [[ ${QATESTMODE} = true ]]; then
+    echo "${argv[@]}"
+    return
+  fi
   hadoop_connect_to_hosts -- "${argv[@]}"
 }
 
@@ -727,8 +753,12 @@ function hadoop_add_param
   # delimited
   #
   if [[ ! ${!1} =~ $2 ]] ; then
-    # shellcheck disable=SC2086
-    eval $1="'${!1} $3'"
+    #shellcheck disable=SC2140
+    eval "$1"="'${!1} $3'"
+    if [[ ${!1:0:1} = ' ' ]]; then
+      #shellcheck disable=SC2140
+      eval "$1"="'${!1# }'"
+    fi
     hadoop_debug "$1 accepted $3"
   else
     hadoop_debug "$1 declined $3"
@@ -766,7 +796,8 @@ function hadoop_add_classpath
   # for wildcard at end, we can
   # at least check the dir exists
   if [[ $1 =~ ^.*\*$ ]]; then
-    local mp=$(dirname "$1")
+    local mp
+    mp=$(dirname "$1")
     if [[ ! -d "${mp}" ]]; then
       hadoop_debug "Rejected CLASSPATH: $1 (not a dir)"
       return 1
@@ -825,7 +856,7 @@ function hadoop_add_colonpath
       hadoop_debug "Prepend colonpath($1): $2"
     else
       # shellcheck disable=SC2086
-      eval $1+="'$2'"
+      eval $1+=":'$2'"
       hadoop_debug "Append colonpath($1): $2"
     fi
     return 0
@@ -864,11 +895,14 @@ function hadoop_add_javalibpath
 ## @return       1 = failure (doesn't exist or some other reason)
 function hadoop_add_ldlibpath
 {
+  local status
   # specialized function for a common use case
   hadoop_add_colonpath LD_LIBRARY_PATH "$1" "$2"
+  status=$?
 
   # note that we export this
   export LD_LIBRARY_PATH
+  return ${status}
 }
 
 ## @description  Add the common/core Hadoop components to the
@@ -876,21 +910,29 @@ function hadoop_add_ldlibpath
 ## @audience     private
 ## @stability    evolving
 ## @replaceable  yes
+## @return       1 on failure, may exit
+## @return       0 on success
 function hadoop_add_common_to_classpath
 {
   #
   # get all of the common jars+config in the path
   #
 
+  if [[ -z "${HADOOP_COMMON_HOME}"
+    || -z "${HADOOP_COMMON_DIR}"
+    || -z "${HADOOP_COMMON_LIB_JARS_DIR}" ]]; then
+    hadoop_debug "COMMON_HOME=${HADOOP_COMMON_HOME}"
+    hadoop_debug "COMMON_DIR=${HADOOP_COMMON_DIR}"
+    hadoop_debug "COMMON_LIB_JARS_DIR=${HADOOP_COMMON_LIB_JARS_DIR}"
+    hadoop_error "ERROR: HADOOP_COMMON_HOME or related vars are not configured."
+    exit 1
+  fi
+
   # developers
   if [[ -n "${HADOOP_ENABLE_BUILD_PATHS}" ]]; then
     hadoop_add_classpath "${HADOOP_COMMON_HOME}/hadoop-common/target/classes"
   fi
 
-  if [[ -d "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}/webapps" ]]; then
-    hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"
-  fi
-
   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_LIB_JARS_DIR}"'/*'
   hadoop_add_classpath "${HADOOP_COMMON_HOME}/${HADOOP_COMMON_DIR}"'/*'
 }
@@ -909,27 +951,27 @@ function hadoop_add_to_classpath_userpath
   # set env-var HADOOP_USER_CLASSPATH_FIRST
   # we'll also dedupe it, because we're cool like that.
   #
-  local c
-  local array
-  local i
-  local j
-  let c=0
+  declare -a array
+  declare -i c=0
+  declare -i j
+  declare -i i
+  declare idx
 
   if [[ -n "${HADOOP_CLASSPATH}" ]]; then
     # I wonder if Java runs on VMS.
-    for i in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
-      array[$c]=$i
-      let c+=1
+    for idx in $(echo "${HADOOP_CLASSPATH}" | tr : '\n'); do
+      array[${c}]=${idx}
+      ((c=c+1))
     done
-    let j=c-1
+    ((j=c-1))
 
     if [[ -z "${HADOOP_USE_CLIENT_CLASSLOADER}" ]]; then
       if [[ -z "${HADOOP_USER_CLASSPATH_FIRST}" ]]; then
-        for ((i=j; i>=0; i--)); do
+        for ((i=0; i<=j; i++)); do
           hadoop_add_classpath "${array[$i]}" after
         done
       else
-        for ((i=0; i<=j; i++)); do
+        for ((i=j; i>=0; i--)); do
           hadoop_add_classpath "${array[$i]}" before
         done
       fi
@@ -951,18 +993,32 @@ function hadoop_os_tricks
     Darwin)
       if [[ -z "${JAVA_HOME}" ]]; then
         if [[ -x /usr/libexec/java_home ]]; then
-          export JAVA_HOME="$(/usr/libexec/java_home)"
+          JAVA_HOME="$(/usr/libexec/java_home)"
+          export JAVA_HOME
         else
-          export JAVA_HOME=/Library/Java/Home
+          JAVA_HOME=/Library/Java/Home
+          export JAVA_HOME
         fi
       fi
     ;;
     Linux)
-      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
+
+      # Newer versions of glibc use an arena memory allocator that
+      # causes virtual memory usage to explode. This interacts badly
+      # with the many threads that we use in Hadoop. Tune the variable
+      # down to prevent vmem explosion.
+      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
+      # we put this in QA test mode off so that non-Linux can test
+      if [[ "${QATESTMODE}" = true ]]; then
+        return
+      fi
 
       # NOTE! HADOOP_ALLOW_IPV6 is a developer hook.  We leave it
       # undocumented in hadoop-env.sh because we don't want users to
       # shoot themselves in the foot while devs make IPv6 work.
+
+      bindv6only=$(/sbin/sysctl -n net.ipv6.bindv6only 2> /dev/null)
+
       if [[ -n "${bindv6only}" ]] &&
          [[ "${bindv6only}" -eq "1" ]] &&
          [[ "${HADOOP_ALLOW_IPV6}" != "yes" ]]; then
@@ -971,11 +1027,6 @@ function hadoop_os_tricks
         hadoop_error "ERROR: For more info: http://wiki.apache.org/hadoop/HadoopIPv6"
         exit 1
       fi
-      # Newer versions of glibc use an arena memory allocator that
-      # causes virtual # memory usage to explode. This interacts badly
-      # with the many threads that we use in Hadoop. Tune the variable
-      # down to prevent vmem explosion.
-      export MALLOC_ARENA_MAX=${MALLOC_ARENA_MAX:-4}
     ;;
     CYGWIN*)
       # Flag that we're running on Cygwin to trigger path translation later.
@@ -1019,7 +1070,7 @@ function hadoop_finalize_libpaths
   if [[ -n "${JAVA_LIBRARY_PATH}" ]]; then
     hadoop_translate_cygwin_path JAVA_LIBRARY_PATH
     hadoop_add_param HADOOP_OPTS java.library.path \
-    "-Djava.library.path=${JAVA_LIBRARY_PATH}"
+      "-Djava.library.path=${JAVA_LIBRARY_PATH}"
     export LD_LIBRARY_PATH
   fi
 }
@@ -1168,6 +1219,7 @@ function hadoop_exit_with_usage
   if [[ -z $exitcode ]]; then
     exitcode=1
   fi
+  # shellcheck disable=SC2034
   if declare -F hadoop_usage >/dev/null ; then
     hadoop_usage
   elif [[ -x /usr/bin/cowsay ]]; then
@@ -1464,6 +1516,7 @@ function hadoop_start_secure_daemon
   hadoop_rotate_log "${daemonoutfile}"
   hadoop_rotate_log "${daemonerrfile}"
 
+  # shellcheck disable=SC2153
   jsvc="${JSVC_HOME}/jsvc"
   if [[ ! -f "${jsvc}" ]]; then
     hadoop_error "JSVC_HOME is not set or set incorrectly. jsvc is required to run secure"
@@ -1490,6 +1543,7 @@ function hadoop_start_secure_daemon
     hadoop_error "ERROR:  Cannot write ${daemonname} pid ${privpidfile}."
   fi
 
+  # shellcheck disable=SC2086
   exec "${jsvc}" \
     "-Dproc_${daemonname}" \
     -outfile "${daemonoutfile}" \

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
new file mode 100755
index 0000000..f718345
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop-functions_test_helper.bash
@@ -0,0 +1,56 @@
+#!/bin/bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+setup() {
+
+  TMP=../../../target/test-dir/bats.$$.${RANDOM}
+  mkdir -p ${TMP}
+  TMP=$(cd -P -- "${TMP}" >/dev/null && pwd -P)
+  export TMP
+  TESTBINDIR=$(cd -P -- "$(pwd)" >/dev/null && pwd -P)
+  HADOOP_LIBEXEC_DIR=${TESTBINDIR}/../../main/bin
+  HADOOP_LIBEXEC_DIR=$(cd -P -- "${HADOOP_LIBEXEC_DIR}" >/dev/null && pwd -P)
+
+  # shellcheck disable=SC2034
+  HADOOP_SHELL_SCRIPT_DEBUG=true
+  unset HADOOP_CONF_DIR
+  unset HADOOP_HOME
+  unset HADOOP_PREFIX
+
+  echo "bindir: ${TESTBINDIR}" 2>&1
+
+  mkdir -p "${TMP}"
+
+  # shellcheck disable=SC2034
+  QATESTMODE=true
+
+  . ../../main/bin/hadoop-functions.sh
+  pushd "${TMP}" >/dev/null
+}
+
+teardown() {
+  popd >/dev/null
+  rm -rf "${TMP}"
+}
+
+
+strstr() {
+  if [ "${1#*$2}" != "${1}" ]; then
+    echo true
+  else
+    echo false
+  fi
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
new file mode 100644
index 0000000..8bc50d0
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_classpath.bats
@@ -0,0 +1,100 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_classpath (simple not exist)" {
+  run hadoop_add_classpath testvar
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_classpath (simple wildcard not exist)" {
+  run hadoop_add_classpath testvar/*
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_classpath (simple exist)" {
+  run hadoop_add_classpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_classpath (simple wildcard exist)" {
+  run hadoop_add_classpath "${TMP}/*"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_classpath (simple dupecheck)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "${TMP}/*"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (default order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (after order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" after
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (before order)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" before
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (simple dupecheck 2)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp"
+  hadoop_add_classpath "${TMP}/*"
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/*:/tmp" ]
+}
+
+@test "hadoop_add_classpath (dupecheck 3)" {
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" before
+  hadoop_add_classpath "${TMP}/*"
+  hadoop_add_classpath "/tmp" after
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "/tmp:${TMP}/*" ]
+}
+
+@test "hadoop_add_classpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_classpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${CLASSPATH}<"
+  [ "${CLASSPATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
new file mode 100644
index 0000000..e6c59ad
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_colonpath.bats
@@ -0,0 +1,96 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_colonpath (simple not exist)" {
+  run hadoop_add_colonpath testvar
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_colonpath (simple exist)" {
+  run hadoop_add_colonpath testvar "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_colonpath (simple dupecheck)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "${TMP}"
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (default order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp"
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (after order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" after
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (before order)" {
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" before
+  echo ">${testvar}<"
+  [ "${testvar}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp"
+  hadoop_add_colonpath testvar "${TMP}"
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_colonpath (dupecheck 3)" {
+  set +e
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" before
+  hadoop_add_colonpath testvar "${TMP}"
+  hadoop_add_colonpath testvar "/tmp" after
+  set -e
+  echo ">${testvar}<"
+  [ "${testvar}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_colonpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_colonpath testvar "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${testvar}<"
+  [ "${testvar}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
new file mode 100644
index 0000000..14e75a6
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_common_to_classpath.bats
@@ -0,0 +1,71 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+freetheclasses () {
+  local j
+
+  for j in HADOOP_CLASSPATH  \
+        HADOOP_ENABLE_BUILD_PATHS \
+        CLASSPATH HADOOP_COMMON_DIR \
+        HADOOP_COMMON_HOME \
+        HADOOP_COMMON_LIB_JARS_DIR \
+        HADOOP_ENABLE_BUILD_PATHS ; do
+      unset ${!j}
+  done
+}
+
+createdirs () {
+  local j
+
+  for j in hadoop-common/target/classes \
+           commondir/webapps commonlibjars ; do
+    mkdir -p "${TMP}/${j}"
+    touch "${TMP}/${j}/fake.jar"
+  done
+  HADOOP_COMMON_HOME=${TMP}
+  HADOOP_COMMON_DIR=commondir
+  HADOOP_COMMON_LIB_JARS_DIR=commonlibjars
+}
+
+@test "hadoop_add_common_to_classpath (negative)" {
+   freetheclasses
+   createdirs
+   unset HADOOP_COMMON_HOME
+   run hadoop_add_common_to_classpath
+   [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_add_common_to_classpath (positive)" {
+   freetheclasses
+   createdirs
+   set +e
+   hadoop_add_common_to_classpath
+   set -e
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
+}
+
+@test "hadoop_add_common_to_classpath (build paths)" {
+   freetheclasses
+   createdirs
+   HADOOP_ENABLE_BUILD_PATHS=true
+   set +e
+   hadoop_add_common_to_classpath
+   set -e
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/hadoop-common/target/classes:${TMP}/commonlibjars/*:${TMP}/commondir/*" ]
+ }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
new file mode 100644
index 0000000..b17b546
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_javalibpath.bats
@@ -0,0 +1,98 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_javalibpath (simple not exist)" {
+  run hadoop_add_javalibpath "${TMP}/foo"
+  [ "${status}" -eq 1 ]
+}
+
+
+@test "hadoop_add_javalibpath (simple exist)" {
+  run hadoop_add_javalibpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+
+@test "hadoop_add_javalibpath (simple dupecheck)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "${TMP}"
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (default order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp"
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (after order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" after
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (before order)" {
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" before
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp"
+  hadoop_add_javalibpath "${TMP}"
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_javalibpath (dupecheck 3)" {
+  set +e
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" before
+  hadoop_add_javalibpath "${TMP}"
+  hadoop_add_javalibpath "/tmp" after
+  set -e
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_javalibpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_javalibpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${JAVA_LIBRARY_PATH}<"
+  [ "${JAVA_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
new file mode 100644
index 0000000..4f909e2
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_ldlibpath.bats
@@ -0,0 +1,97 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_ldlibpath (simple not exist)" {
+  run hadoop_add_ldlibpath ${TMP}/foo
+  [ "${status}" -eq 1 ]
+}
+
+
+@test "hadoop_add_ldlibpath (simple exist)" {
+  run hadoop_add_ldlibpath "${TMP}"
+  [ "${status}" -eq 0 ]
+}
+
+@test "hadoop_add_ldlibpath (simple dupecheck)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "${TMP}"
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (default order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp"
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (after order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" after
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (before order)" {
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" before
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (simple dupecheck 2)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp"
+  hadoop_add_ldlibpath "${TMP}"
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}:/tmp" ]
+}
+
+@test "hadoop_add_ldlibpath (dupecheck 3)" {
+  set +e
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" before
+  hadoop_add_ldlibpath "${TMP}"
+  hadoop_add_ldlibpath "/tmp" after
+  set -e
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "/tmp:${TMP}" ]
+}
+
+@test "hadoop_add_ldlibpath (complex ordering)" {
+  local j
+  local style="after"
+
+  # 1 -> 2:1 -> 2:1:3 -> 4:2:1:3 -> 4:2:1:3:5
+
+  for j in {1..5}; do
+    mkdir ${TMP}/${j}
+    hadoop_add_ldlibpath "${TMP}/${j}" "${style}"
+    if [ "${style}" = "after" ]; then
+      style=before
+    else
+      style=after
+    fi
+  done
+  echo ">${LD_LIBRARY_PATH}<"
+  [ "${LD_LIBRARY_PATH}" = "${TMP}/4:${TMP}/2:${TMP}/1:${TMP}/3:${TMP}/5" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
new file mode 100644
index 0000000..5d65db0
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_param.bats
@@ -0,0 +1,49 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_add_param (positive 1)" {
+  hadoop_add_param testvar foo foo
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo" ]
+}
+
+@test "hadoop_add_param (negative)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo" ]
+}
+
+@test "hadoop_add_param (positive 2)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar bar bar
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo bar" ]
+}
+
+@test "hadoop_add_param (positive 3)" {
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar foo foo
+  hadoop_add_param testvar bar bar
+  hadoop_add_param testvar bar bar
+  hadoop_add_param testvar baz baz
+  hadoop_add_param testvar baz baz
+
+  echo ">${testvar}<"
+  [ "${testvar}" = "foo bar baz" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
new file mode 100644
index 0000000..4d6667f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_add_to_classpath_userpath.bats
@@ -0,0 +1,98 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+freetheclasses () {
+  local j
+
+  for j in HADOOP_CLASSPATH  \
+      HADOOP_USE_CLIENT_CLASSLOADER \
+      HADOOP_USER_CLASSPATH_FIRST \
+      CLASSPATH; do
+      unset ${j}
+  done
+}
+
+createdirs () {
+  local j
+
+  for j in new old foo bar baz; do
+    mkdir -p "${TMP}/${j}"
+  done
+}
+
+@test "hadoop_add_to_classpath_userpath (nothing)" {
+   freetheclasses
+   hadoop_add_to_classpath_userpath
+   [ -z "${CLASSPATH}" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (none)" {
+   freetheclasses
+   CLASSPATH=test
+   hadoop_add_to_classpath_userpath
+   [ "${CLASSPATH}" = "test" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (only)" {
+   freetheclasses
+   createdirs
+   HADOOP_CLASSPATH="${TMP}/new"
+   hadoop_add_to_classpath_userpath
+   [ "${CLASSPATH}" = "${TMP}/new" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (classloader)" {
+   freetheclasses
+   createdirs
+   HADOOP_CLASSPATH="${TMP}/new"
+   HADOOP_USE_CLIENT_CLASSLOADER="true"
+   hadoop_add_to_classpath_userpath
+   [ -z "${CLASSPATH}" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (1+1 dupe)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo
+   HADOOP_CLASSPATH=${TMP}/foo
+   HADOOP_USER_CLASSPATH_FIRST=""
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/foo" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (3+2 after)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
+   HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
+   HADOOP_USER_CLASSPATH_FIRST=""
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/foo:${TMP}/bar:${TMP}/baz:${TMP}/new:${TMP}/old" ]
+}
+
+@test "hadoop_add_to_classpath_userpath (3+2 before)" {
+   freetheclasses
+   createdirs
+   CLASSPATH=${TMP}/foo:${TMP}/bar:${TMP}/baz
+   HADOOP_CLASSPATH=${TMP}/new:${TMP}/old
+   HADOOP_USER_CLASSPATH_FIRST="true"
+   hadoop_add_to_classpath_userpath
+   echo ">${CLASSPATH}<"
+   [ "${CLASSPATH}" = "${TMP}/new:${TMP}/old:${TMP}/foo:${TMP}/bar:${TMP}/baz" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
new file mode 100644
index 0000000..74e2497
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_basic_init.bats
@@ -0,0 +1,94 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+
+
+basicinitsetup () {
+  local j
+
+  testvars="HADOOP_IDENT_STRING \
+        HADOOP_LOG_DIR \
+        HADOOP_LOGFILE \
+        HADOOP_LOGLEVEL \
+        HADOOP_NICENESS \
+        HADOOP_STOP_TIMEOUT \
+        HADOOP_PID_DIR \
+        HADOOP_ROOT_LOGGER \
+        HADOOP_DAEMON_ROOT_LOGGER \
+        HADOOP_SECURITY_LOGGER \
+        HADOOP_SSH_OPTS \
+        HADOOP_SECURE_LOG_DIR \
+        HADOOP_SECURE_PID_DIR \
+        HADOOP_SSH_PARALLEL"
+
+  dirvars="HADOOP_COMMON_HOME \
+        HADOOP_MAPRED_HOME \
+        HADOOP_HDFS_HOME \
+        HADOOP_YARN_HOME"
+
+  for j in ${testvars}; do
+    unset ${j}
+  done
+
+  HADOOP_PREFIX=${TMP}
+}
+
+check_var_values () {
+  for j in ${testvars}; do
+    echo "Verifying ${j} has a value"
+    [ -n "${!j}" ]
+  done
+}
+
+@test "hadoop_basic_init (bad dir errors)" {
+  local j
+  local i
+  # we need to do these in the same order for
+  # the unit test, so that the tests are easier
+  # to write/test
+  basicinitsetup
+  for j in ${dirvars}; do
+    echo "testing ${j}"
+    i=${TMP}/${j}
+    mkdir -p "${i}"
+    #shellcheck disable=SC2086
+    eval ${j}=${i}
+    hadoop_basic_init
+    echo "Verifying $j has >${i}< >${!j}<"
+    [ ${!j} = ${i} ]
+  done
+}
+
+
+@test "hadoop_basic_init (no non-dir overrides)" {
+  basicinitsetup
+  hadoop_basic_init
+  check_var_values
+}
+
+@test "hadoop_basic_init (test non-dir overrides)" {
+  local j
+  for j in ${testvars}; do
+    basicinitsetup
+    echo testing ${j}
+    eval ${j}=foo
+    hadoop_basic_init
+    check_var_values
+    echo "Verifying $j has foo >${!j}<"
+    [ "${!j}" = foo ]
+  done
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
new file mode 100644
index 0000000..0fd5d21
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_bootstrap.bats
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_bootstrap (no libexec)" {
+  unset HADOOP_LIBEXEC_DIR
+  run hadoop_bootstrap
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_bootstrap (libexec)" {
+  unset   HADOOP_PREFIX
+  unset   HADOOP_COMMON_DIR
+  unset   HADOOP_COMMON_LIB_JARS_DIR
+  unset   HDFS_DIR
+  unset   HDFS_LIB_JARS_DIR
+  unset   YARN_DIR
+  unset   YARN_LIB_JARS_DIR
+  unset   MAPRED_DIR
+  unset   MAPRED_LIB_JARS_DIR
+  unset   TOOL_PATH
+  unset   HADOOP_OS_TYPE
+
+  hadoop_bootstrap
+
+  # all of these should be set
+  [ -n "${HADOOP_PREFIX}" ]
+  [ -n "${HADOOP_COMMON_DIR}" ]
+  [ -n "${HADOOP_COMMON_LIB_JARS_DIR}" ]
+  [ -n "${HDFS_DIR}" ]
+  [ -n "${HDFS_LIB_JARS_DIR}" ]
+  [ -n "${YARN_DIR}" ]
+  [ -n "${YARN_LIB_JARS_DIR}" ]
+  [ -n "${MAPRED_DIR}" ]
+  [ -n "${MAPRED_LIB_JARS_DIR}" ]
+  [ -n "${TOOL_PATH}" ]
+  [ -n "${HADOOP_OS_TYPE}" ]
+} 

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
new file mode 100644
index 0000000..3e42da9
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_confdir.bats
@@ -0,0 +1,92 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+create_fake_dirs () {
+  HADOOP_PREFIX=${TMP}
+  for j in conf etc/hadoop; do
+    mkdir -p "${HADOOP_PREFIX}/${j}"
+    echo "unittest=${j}" > "${HADOOP_PREFIX}/${j}/hadoop-env.sh"
+  done
+}
+
+@test "hadoop_find_confdir (default)" {
+  create_fake_dirs
+  hadoop_find_confdir
+  [ -n "${HADOOP_CONF_DIR}" ]
+}
+
+@test "hadoop_find_confdir (bw compat: conf)" {
+  create_fake_dirs
+  hadoop_find_confdir
+  echo ">${HADOOP_CONF_DIR}< >${HADOOP_PREFIX}/conf<"
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/conf ]
+}
+
+@test "hadoop_find_confdir (etc/hadoop)" {
+  create_fake_dirs
+  rm -rf "${HADOOP_PREFIX}/conf"
+  hadoop_find_confdir
+  [ "${HADOOP_CONF_DIR}" = ${HADOOP_PREFIX}/etc/hadoop ]
+}
+
+@test "hadoop_verify_confdir (negative) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  run hadoop_verify_confdir
+  [ -n "${output}" ]
+}
+
+@test "hadoop_verify_confdir (positive) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  touch "${HADOOP_CONF_DIR}/log4j.properties"
+  run hadoop_verify_confdir
+  [ -z "${output}" ]
+}
+
+@test "hadoop_exec_hadoopenv (positive) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  hadoop_exec_hadoopenv
+  [ -n "${HADOOP_ENV_PROCESSED}" ]
+  [ "${unittest}" = conf ]
+}
+
+@test "hadoop_exec_hadoopenv (negative) " {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  HADOOP_ENV_PROCESSED=true
+  hadoop_exec_hadoopenv
+  [ -z "${unittest}" ]
+}
+
+@test "hadoop_exec_userfuncs" {
+  create_fake_dirs
+  HADOOP_CONF_DIR=${HADOOP_PREFIX}/conf
+  echo "unittest=userfunc" > "${HADOOP_CONF_DIR}/hadoop-user-functions.sh"
+  hadoop_exec_userfuncs
+  [ "${unittest}" = "userfunc" ]
+}
+
+@test "hadoop_exec_hadooprc" {
+  HOME=${TMP}
+  echo "unittest=hadooprc" > "${TMP}/.hadooprc"
+  hadoop_exec_hadooprc
+  [ ${unittest} = "hadooprc" ]
+}
+
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
new file mode 100644
index 0000000..ae02c1f
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_deprecate_envvar.bats
@@ -0,0 +1,32 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_deprecate_envvar (replace)" {
+  OLD=value1
+  NEW=value2
+  hadoop_deprecate_envvar OLD NEW
+  [ "${NEW}" = "${OLD}" ]
+}
+
+
+@test "hadoop_deprecate_envvar (no replace)" {
+  OLD=
+  NEW=value2
+  hadoop_deprecate_envvar OLD NEW
+  [ "${NEW}" = value2 ]
+}
+

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
new file mode 100644
index 0000000..668c115
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize.bats
@@ -0,0 +1,206 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize (shellprofiles)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { testvar=shell; }
+  hadoop_finalize_classpath () { true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "shell" ];
+}
+
+@test "hadoop_finalize (classpath)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  testvar=class; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "class" ];
+}
+
+@test "hadoop_finalize (libpaths)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { testvar=libpaths; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "libpaths" ];
+}
+
+
+@test "hadoop_finalize (heap)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { testvar=heap; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "heap" ];
+}
+
+@test "hadoop_finalize (opts)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { testvar=opts; }
+  hadoop_translate_cygwin_path () { true; }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "opts" ];
+}
+
+@test "hadoop_finalize (cygwin prefix)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_PREFIX ]; then
+      testvar=prefix;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "prefix" ];
+}
+
+@test "hadoop_finalize (cygwin conf dir)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_CONF_DIR ]; then
+      testvar=confdir;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "confdir" ];
+}
+
+@test "hadoop_finalize (cygwin common)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_COMMON_HOME ]; then
+      testvar=common;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "common" ];
+}
+
+@test "hadoop_finalize (cygwin hdfs)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_HDFS_HOME ]; then
+      testvar=hdfs;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "hdfs" ];
+}
+
+@test "hadoop_finalize (cygwin yarn)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_YARN_HOME ]; then
+      testvar=yarn;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "yarn" ];
+}
+
+@test "hadoop_finalize (cygwin mapred)" {
+  HADOOP_IS_CYGWIN=false
+
+  hadoop_shellprofiles_finalize () { true; }
+  hadoop_finalize_classpath () {  true; }
+  hadoop_finalize_libpaths () { true; }
+  hadoop_finalize_hadoop_heap () { true; }
+  hadoop_finalize_hadoop_opts () { true; }
+  hadoop_translate_cygwin_path () {
+    if [ $1 = HADOOP_MAPRED_HOME ]; then
+      testvar=mapred;
+    fi
+  }
+
+  hadoop_finalize
+
+  [ "${testvar}" = "mapred" ];
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
new file mode 100644
index 0000000..d91223e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_catalina_opts.bats
@@ -0,0 +1,56 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_catalina_opts (raw)" {
+  local j
+
+  HADOOP_IS_CYGWIN=false
+  HADOOP_CATALINA_PREFIX=test
+  CATALINA_OPTS=""
+  hadoop_finalize_catalina_opts
+  for j in test.home.dir \
+        test.config.dir \
+        test.log.dir \
+        test.admin.port \
+        test.http.port \
+        test.max.threads \
+        test.ssl.keystore.file; do
+    [ "${CATALINA_OPTS#*${j}}" != "${CATALINA_OPTS}" ]
+  done
+}
+
+@test "hadoop_finalize_catalina_opts (cygwin)" {
+  local j
+
+  skip "catalina commands not supported under cygwin yet"
+
+  HADOOP_IS_CYGWIN=true
+  HADOOP_CATALINA_PREFIX=test
+  CATALINA_OPTS=""
+
+  catalina_translate_cygwin_path () {
+    eval ${1}="foobarbaz"
+  }
+
+  hadoop_finalize_catalina_opts
+  for j in test.home.dir \
+        test.config.dir \
+        test.log.dir \
+        test.ssl.keystore.file; do
+    [ "${CATALINA_OPTS#*${j}=foobarbaz}" != "${CATALINA_OPTS}" ]
+  done
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
new file mode 100644
index 0000000..ac0d4c1
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_classpath.bats
@@ -0,0 +1,64 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_classpath (only conf dir)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+
+  [ "${CLASSPATH}" = "${TMP}" ]
+
+}
+
+@test "hadoop_finalize_classpath (before conf dir)" {
+  CLASSPATH="1"
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+
+  [ "${CLASSPATH}" = "${TMP}:1" ]
+}
+
+@test "hadoop_finalize_classpath (adds user)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+
+  hadoop_translate_cygwin_path () { true; }
+  hadoop_add_to_classpath_userpath () { testvar=true; }
+
+  hadoop_finalize_classpath
+
+  [ "${testvar}" = "true" ]
+}
+
+@test "hadoop_finalize_classpath (calls cygwin)" {
+  CLASSPATH=""
+  HADOOP_CONF_DIR="${TMP}"
+  HADOOP_IS_CYGWIN=true
+
+  hadoop_translate_cygwin_path () { [ $1 = CLASSPATH ]; }
+  hadoop_add_to_classpath_userpath () { true; }
+
+  hadoop_finalize_classpath
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
new file mode 100644
index 0000000..ef49d5b
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_heap.bats
@@ -0,0 +1,87 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+resetops () {
+  unset HADOOP_HEAPSIZE_MAX
+  unset HADOOP_HEAPSIZE
+  unset HADOOP_HEAPSIZE_MIN
+  unset HADOOP_OPTS
+}
+
+@test "hadoop_finalize_hadoop_heap (negative)" {
+  resetops
+  hadoop_finalize_hadoop_heap
+  [ -z "${HADOOP_OPTS}" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (no unit max)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (no unit old)" {
+  resetops
+  HADOOP_HEAPSIZE=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx1000m" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (unit max)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (unit old)" {
+  resetops
+  HADOOP_HEAPSIZE=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx10g" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (no unit min)" {
+  resetops
+  HADOOP_HEAPSIZE_MIN=1000
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xms1000m" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (unit min)" {
+  resetops
+  HADOOP_HEAPSIZE_MIN=10g
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xms10g" ]
+}
+
+@test "hadoop_finalize_hadoop_heap (dedupe)" {
+  resetops
+  HADOOP_HEAPSIZE_MAX=1000
+  HADOOP_OPTS="-Xmx5g"
+  hadoop_finalize_hadoop_heap
+  hadoop_finalize_hadoop_heap
+  echo ">${HADOOP_OPTS}<"
+  [ "${HADOOP_OPTS}" = "-Xmx5g" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
new file mode 100644
index 0000000..3acb1a5
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_hadoop_opts.bats
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_hadoop_opts (raw)" {
+  local j
+
+  HADOOP_IS_CYGWIN=false
+  HADOOP_OPTS=""
+  hadoop_finalize_hadoop_opts
+  for j in hadoop.log.dir \
+        hadoop.log.file \
+        hadoop.home.dir \
+        hadoop.root.logger \
+        hadoop.policy.file \
+        hadoop.security.logger \
+        hadoop.id.str; do
+
+    [ "${HADOOP_OPTS#*${j}}" != "${HADOOP_OPTS}" ]
+  done
+}
+
+@test "hadoop_finalize_hadoop_opts (cygwin)" {
+  local j
+
+  HADOOP_IS_CYGWIN=true
+  HADOOP_OPTS=""
+
+  hadoop_translate_cygwin_path () {
+    eval ${1}="foobarbaz"
+  }
+
+  hadoop_finalize_hadoop_opts
+  for j in hadoop.log.dir \
+        hadoop.home.dir; do
+    echo "${j} from >${HADOOP_OPTS}<"
+    [ "${HADOOP_OPTS#*${j}=foobarbaz}" != "${HADOOP_OPTS}" ]
+  done
+}

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
new file mode 100644
index 0000000..48ba773
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_finalize_libpaths.bats
@@ -0,0 +1,30 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_finalize_libpaths (negative)" {
+  unset JAVA_LIBRARY_PATH
+  unset HADOOP_OPTS
+  hadoop_finalize_libpaths
+  [ -z "${HADOOP_OPTS}" ]
+}
+
+@test "hadoop_finalize_libpaths (positive)" {
+  JAVA_LIBRARY_PATH=test
+  unset HADOOP_OPTS
+  hadoop_finalize_libpaths
+  [ "${HADOOP_OPTS}" = "-Djava.library.path=test" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
new file mode 100644
index 0000000..5a6ee10
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_java_setup.bats
@@ -0,0 +1,47 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_java_setup (negative not set)" {
+  unset JAVA_HOME
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (negative not a dir)" {
+  touch ${TMP}/foo
+  JAVA_HOME="${TMP}/foo"
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (negative not exec)" {
+  mkdir -p "${TMP}/bin"
+  touch "${TMP}/bin/java"
+  JAVA_HOME="${TMP}"
+  chmod a-x "${TMP}/bin/java"
+  run hadoop_java_setup
+  [ "${status}" -eq 1 ]
+}
+
+@test "hadoop_java_setup (positive)" {
+  mkdir -p "${TMP}/bin"
+  touch "${TMP}/bin/java"
+  JAVA_HOME="${TMP}"
+  chmod a+x "${TMP}/bin/java"
+  run hadoop_java_setup
+  [ "${status}" -eq 0 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
new file mode 100644
index 0000000..ae04f72
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_os_tricks.bats
@@ -0,0 +1,34 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_os_tricks (cygwin sets cygwin)" {
+  HADOOP_OS_TYPE=CYGWIN-IS-GNU-USER-LAND
+  hadoop_os_tricks
+  [ "${HADOOP_IS_CYGWIN}" = "true" ]
+}
+
+@test "hadoop_os_tricks (linux sets arena max)" {
+  HADOOP_OS_TYPE=Linux
+  hadoop_os_tricks
+  [ -n "${MALLOC_ARENA_MAX}" ]
+}
+
+@test "hadoop_os_tricks (osx sets java_home)" {
+  HADOOP_OS_TYPE=Darwin
+  hadoop_os_tricks
+  [ -n "${JAVA_HOME}" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
new file mode 100644
index 0000000..f73fea6
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_rotate_log.bats
@@ -0,0 +1,52 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# With no count argument, rotating moves log -> log.1.
+@test "hadoop_rotate_log (defaults)" {
+  touch "${TMP}/log"
+  hadoop_rotate_log "${TMP}/log"
+  [ -f "${TMP}/log.1" ]
+  [ ! -f "${TMP}/log" ]
+}
+
+# An explicit count of 1 behaves the same for the first rotation.
+@test "hadoop_rotate_log (one archive log)" {
+  touch "${TMP}/log"
+  hadoop_rotate_log "${TMP}/log" 1
+  [ -f "${TMP}/log.1" ]
+  [ ! -f "${TMP}/log" ]
+}
+
+# Repeated rotation with the default retention keeps five numbered
+# archives: after the i-th rotation, log.i must exist.
+@test "hadoop_rotate_log (default five archive logs)" {
+  local i
+  for i in {1..5}; do
+    echo "Testing ${i}"
+    touch "${TMP}/log"
+    hadoop_rotate_log "${TMP}/log"
+    ls "${TMP}"
+    [ -f "${TMP}/log.${i}" ]
+  done
+}
+
+# A retention of 10 keeps ten numbered archives.
+@test "hadoop_rotate_log (ten archive logs)" {
+  local i
+  for i in {1..10}; do
+    echo "Testing ${i}"
+    touch "${TMP}/log"
+    hadoop_rotate_log "${TMP}/log" 10
+    ls "${TMP}"
+    [ -f "${TMP}/log.${i}" ]
+  done
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
new file mode 100644
index 0000000..d6e0a25
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_shellprofile.bats
@@ -0,0 +1,91 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Create empty libexec/ and conf/ shellprofile.d directories under the
+# test scratch area so hadoop_import_shellprofiles has somewhere to look.
+shellprofilesetup () {
+  HADOOP_LIBEXEC_DIR="${TMP}/libexec"
+  HADOOP_CONF_DIR="${TMP}/conf"
+  mkdir -p "${HADOOP_LIBEXEC_DIR}/shellprofile.d" "${HADOOP_CONF_DIR}/shellprofile.d"
+}
+
+# The four functions below are the per-phase callback hooks that the
+# hadoop_shellprofiles_* dispatchers invoke for a profile registered as
+# "test"; each records which phase ran in the global ${unittest}.
+_test_hadoop_init () {
+  unittest=init
+}
+
+_test_hadoop_classpath () {
+  unittest=classpath
+}
+
+_test_hadoop_nativelib () {
+  unittest=nativelib
+}
+
+_test_hadoop_finalize () {
+  unittest=finalize
+}
+
+# Importing with HADOOP_LIBEXEC_DIR unset should complain on output.
+@test "hadoop_import_shellprofiles (negative)" {
+  shellprofilesetup
+  unset HADOOP_LIBEXEC_DIR
+  run hadoop_import_shellprofiles
+  [ -n "${output}" ]
+}
+
+# A *.sh profile dropped in libexec/shellprofile.d must get sourced.
+@test "hadoop_import_shellprofiles (libexec sh import)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
+  hadoop_import_shellprofiles
+  [ "${unittest}" = libexec ]
+}
+
+# A conf-dir profile with the same name must win over the libexec one,
+# i.e. conf is sourced after (or instead of) libexec.
+@test "hadoop_import_shellprofiles (libexec conf sh import+override)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.sh"
+  echo "unittest=conf" > "${HADOOP_CONF_DIR}/shellprofile.d/test.sh"
+  hadoop_import_shellprofiles
+  [ "${unittest}" = conf ]
+}
+
+# Non-.sh files (e.g. Windows .cmd) must be ignored by the importer.
+@test "hadoop_import_shellprofiles (libexec no cmd import)" {
+  shellprofilesetup
+  echo "unittest=libexec" > "${HADOOP_LIBEXEC_DIR}/shellprofile.d/test.cmd"
+  hadoop_import_shellprofiles
+  [ -z "${unittest}" ]
+}
+
+# Each dispatcher below must call the matching _test_hadoop_* hook for a
+# registered profile, as witnessed by the ${unittest} sentinel.
+@test "hadoop_add_profile+hadoop_shellprofiles_init" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_init
+  [ "${unittest}" = init ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_classpath" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_classpath
+  [ "${unittest}" = classpath ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_nativelib" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_nativelib
+  [ "${unittest}" = nativelib ]
+}
+
+@test "hadoop_add_profile+hadoop_shellprofiles_finalize" {
+  hadoop_add_profile test
+  hadoop_shellprofiles_finalize
+  [ "${unittest}" = finalize ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
new file mode 100644
index 0000000..cc33f0e
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_slaves.bats
@@ -0,0 +1,37 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_populate_slaves_file (specific file)" {
+  touch "${TMP}/file"
+  hadoop_populate_slaves_file "${TMP}/file"
+  [ "${HADOOP_SLAVES}" = "${TMP}/file" ]
+}
+
+@test "hadoop_populate_slaves_file (specific conf dir file)" {
+  HADOOP_CONF_DIR=${TMP}/1
+  mkdir -p "${HADOOP_CONF_DIR}"
+  touch "${HADOOP_CONF_DIR}/file"
+  hadoop_populate_slaves_file "file"
+  echo "${HADOOP_SLAVES}"
+  [ "${HADOOP_SLAVES}" = "${HADOOP_CONF_DIR}/file" ]
+}
+
+@test "hadoop_populate_slaves_file (no file)" {
+  HADOOP_CONF_DIR=${TMP}
+  run hadoop_populate_slaves_file "foo"
+  [ "${status}" -eq 1 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
new file mode 100644
index 0000000..53e86ce
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_ssh.bats
@@ -0,0 +1,51 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Placeholders: the ssh-driving functions need a live environment.
+@test "hadoop_actual_ssh" {
+  skip "Not implemented"
+  hadoop_actual_ssh
+}
+
+@test "hadoop_connect_to_hosts" {
+  skip "Not implemented"
+  hadoop_connect_to_hosts
+}
+
+@test "hadoop_connect_to_hosts_without_pdsh" {
+  skip "Not implemented"
+  hadoop_connect_to_hosts_without_pdsh
+}
+
+# For the slave-mode argument parser, ${output} must be quoted in every
+# comparison: unquoted, a multi-word result word-splits inside [ ] and
+# the test aborts with "too many arguments" instead of comparing.
+@test "hadoop_common_slave_mode_execute (--slaves 1)" {
+  run hadoop_common_slave_mode_execute --slaves command
+  [ "${output}" = "command" ]
+}
+
+@test "hadoop_common_slave_mode_execute (--slaves 2)" {
+  run hadoop_common_slave_mode_execute --slaves command1 command2
+  [ "${output}" = "command1 command2" ]
+}
+
+@test "hadoop_common_slave_mode_execute (--hosts)" {
+  run hadoop_common_slave_mode_execute --hosts filename command
+  [ "${output}" = "command" ]
+}
+
+@test "hadoop_common_slave_mode_execute (--hostnames 2)" {
+  run hadoop_common_slave_mode_execute --hostnames "host1,host2" command1 command2
+  [ "${output}" = "command1 command2" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
new file mode 100644
index 0000000..e5f6aec
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_translate_cygwin_path.bats
@@ -0,0 +1,48 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+# Outside cygwin, translation must be a no-op.
+@test "hadoop_translate_cygwin_path (negative)" {
+  HADOOP_IS_CYGWIN=false
+  testvar="/this/path/is/cool"
+  hadoop_translate_cygwin_path testvar
+  [ "${testvar}" = "/this/path/is/cool" ]
+}
+
+# Under cygwin, the named variable must be rewritten with cygpath's
+# output (cygpath is stubbed to a fixed string here).
+@test "hadoop_translate_cygwin_path (positive)" {
+  HADOOP_IS_CYGWIN=true
+  testvar="/this/path/is/cool"
+
+  cygpath () {
+    echo "test"
+  }
+
+  hadoop_translate_cygwin_path testvar
+  [ "${testvar}" = "test" ]
+}
+
+
+# Same as above, but with the second argument requesting PATH-style
+# (list) conversion; the stub makes both modes indistinguishable.
+@test "hadoop_translate_cygwin_path (path positive)" {
+  HADOOP_IS_CYGWIN=true
+  testvar="/this/path/is/cool"
+
+  cygpath () {
+    echo "test"
+  }
+
+  hadoop_translate_cygwin_path testvar true
+  [ "${testvar}" = "test" ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
new file mode 100644
index 0000000..1ba5b32
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/hadoop_validate_classname.bats
@@ -0,0 +1,26 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+load hadoop-functions_test_helper
+
+@test "hadoop_validate_classname (negative)" {
+  run hadoop_validate_classname fakeclass
+  [ ${status} -eq 1 ]
+}
+
+@test "hadoop_validate_classname (positive)" {
+  run hadoop_validate_classname org.apache.hadoop.io.Text
+  [ ${status} -eq 0 ]
+}
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hadoop/blob/a890a315/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
----------------------------------------------------------------------
diff --git a/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh b/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
new file mode 100755
index 0000000..566f47a
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/scripts/run-bats.sh
@@ -0,0 +1,43 @@
+#!/usr/bin/env bash
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+targetdir=../../../target
+mkdir -p ${targetdir}/surefire-reports ${targetdir}/tap
+
+batsexe=$(which bats) 2>/dev/null
+
+if [[ -z ${batsexe} ]]; then
+  echo "not ok - no bats executable found" >  "${targetdir}/tap/shelltest.tap"
+  echo ""
+  echo ""
+  echo "ERROR: bats not installed. Skipping bash tests."
+  echo "ERROR: Please install bats as soon as possible."
+  echo ""
+  echo ""
+  exit 0
+fi
+
+for j in *.bats; do
+  echo Running bats -t "${j}"
+  bats -t "${j}" 2>&1 | tee "${targetdir}/tap/${j}.tap"
+  result=${PIPESTATUS[0]}
+  ((exitcode=exitcode+result))
+done
+
+if [[ ${exitcode} -gt 0 ]]; then
+  exit 1
+fi
+exit 0