Posted to mapreduce-commits@hadoop.apache.org by aw...@apache.org on 2014/08/19 14:11:19 UTC

svn commit: r1618847 - in /hadoop/common/trunk/hadoop-mapreduce-project: bin/mapred bin/mapred-config.sh bin/mr-jobhistory-daemon.sh conf/mapred-env.sh

Author: aw
Date: Tue Aug 19 12:11:17 2014
New Revision: 1618847

URL: http://svn.apache.org/r1618847
Log:
HADOOP-9902. Shell script rewrite (aw)

Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred
    hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred-config.sh
    hadoop/common/trunk/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
    hadoop/common/trunk/hadoop-mapreduce-project/conf/mapred-env.sh

Modified: hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred?rev=1618847&r1=1618846&r2=1618847&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred Tue Aug 19 12:11:17 2014
@@ -15,138 +15,129 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-bin=`which $0`
-bin=`dirname ${bin}`
-bin=`cd "$bin" > /dev/null; pwd`
-
-DEFAULT_LIBEXEC_DIR="$bin"/../libexec
-HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-if [ -e ${HADOOP_LIBEXEC_DIR}/mapred-config.sh ]; then
-  . ${HADOOP_LIBEXEC_DIR}/mapred-config.sh
-else
-  . "$bin/mapred-config.sh"
-fi
-
-function print_usage(){
-  echo "Usage: mapred [--config confdir] COMMAND"
+function hadoop_usage
+{
+  echo "Usage: mapred [--config confdir] [--daemon (start|stop|status)] COMMAND"
   echo "       where COMMAND is one of:"
-  echo "  pipes                run a Pipes job"
-  echo "  job                  manipulate MapReduce jobs"
-  echo "  queue                get information regarding JobQueues"
+  
+  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
   echo "  classpath            prints the class path needed for running"
   echo "                       mapreduce subcommands"
-  echo "  historyserver        run job history servers as a standalone daemon"
   echo "  distcp <srcurl> <desturl> copy file or directories recursively"
-  echo "  archive -archiveName NAME -p <parent path> <src>* <dest> create a hadoop archive"
-  echo "  hsadmin              job history server admin interface"
+  echo "  job                  manipulate MapReduce jobs"
+  echo "  historyserver        run job history servers as a standalone daemon"
+  echo "  pipes                run a Pipes job"
+  echo "  queue                get information regarding JobQueues"
+  echo "  sampler              sampler"
   echo ""
   echo "Most commands print help when invoked w/o parameters."
 }
 
+this="${BASH_SOURCE-$0}"
+bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
+fi
+
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/mapred-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/mapred-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/mapred-config.sh." 1>&2
+  exit 1
+fi
+
+
 if [ $# = 0 ]; then
-  print_usage
-  exit
+  hadoop_exit_with_usage 1
 fi
 
 COMMAND=$1
 shift
 
-case $COMMAND in
-  # usage flags
-  --help|-help|-h)
-    print_usage
-    exit
-    ;;
+case ${COMMAND} in
+  mradmin|jobtracker|tasktracker|groups)
+    echo "Sorry, the ${COMMAND} command is no longer supported."
+    echo "You may find similar functionality with the \"yarn\" shell command."
+    hadoop_exit_with_usage 1
+  ;;
+  archive)
+    CLASS=org.apache.hadoop.tools.HadoopArchives
+    hadoop_add_classpath "${TOOL_PATH}"
+    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+  ;;
+  classpath)
+    hadoop_finalize
+    echo "${CLASSPATH}"
+    exit 0
+  ;;
+  distcp)
+    CLASS=org.apache.hadoop.tools.DistCp
+    hadoop_add_classpath "${TOOL_PATH}"
+    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+  ;;
+  historyserver)
+    daemon="true"
+    CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
+    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_JOB_HISTORYSERVER_OPTS}"
+    if [ -n "${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}" ]; then
+      JAVA_HEAP_MAX="-Xmx${HADOOP_JOB_HISTORYSERVER_HEAPSIZE}m"
+    fi
+    HADOOP_DAEMON_ROOT_LOGGER=${HADOOP_JHS_LOGGER:-$HADOOP_DAEMON_ROOT_LOGGER}
+  ;;
+  job)
+    CLASS=org.apache.hadoop.mapred.JobClient
+  ;;
+  pipes)
+    CLASS=org.apache.hadoop.mapred.pipes.Submitter
+    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+  ;;
+  queue)
+    CLASS=org.apache.hadoop.mapred.JobQueueClient
+  ;;
+  sampler)
+    CLASS=org.apache.hadoop.mapred.lib.InputSampler
+    HADOOP_OPTS="${HADOOP_OPTS} ${HADOOP_CLIENT_OPTS}"
+  ;;
+  -*|*)
+    hadoop_exit_with_usage 1
+  ;;
 esac
 
-if [ "$COMMAND" = "job" ] ; then
-  CLASS=org.apache.hadoop.mapred.JobClient
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "queue" ] ; then
-  CLASS=org.apache.hadoop.mapred.JobQueueClient
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "pipes" ] ; then
-  CLASS=org.apache.hadoop.mapred.pipes.Submitter
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "sampler" ] ; then
-  CLASS=org.apache.hadoop.mapred.lib.InputSampler
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "classpath" ] ; then
-  echo -n 
-elif [ "$COMMAND" = "historyserver" ] ; then
-  CLASS=org.apache.hadoop.mapreduce.v2.hs.JobHistoryServer
-  HADOOP_OPTS="$HADOOP_OPTS -Dmapred.jobsummary.logger=${HADOOP_JHS_LOGGER:-INFO,console} $HADOOP_JOB_HISTORYSERVER_OPTS"
-  if [ "$HADOOP_JOB_HISTORYSERVER_HEAPSIZE" != "" ]; then
-    JAVA_HEAP_MAX="-Xmx""$HADOOP_JOB_HISTORYSERVER_HEAPSIZE""m"
-  fi
-elif [ "$COMMAND" = "mradmin" ] \
-    || [ "$COMMAND" = "jobtracker" ] \
-    || [ "$COMMAND" = "tasktracker" ] \
-    || [ "$COMMAND" = "groups" ] ; then
-  echo "Sorry, the $COMMAND command is no longer supported."
-  echo "You may find similar functionality with the \"yarn\" shell command."
-  print_usage
-  exit 1
-elif [ "$COMMAND" = "distcp" ] ; then
-  CLASS=org.apache.hadoop.tools.DistCp
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "archive" ] ; then
-  CLASS=org.apache.hadoop.tools.HadoopArchives
-  CLASSPATH=${CLASSPATH}:${TOOL_PATH}
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-elif [ "$COMMAND" = "hsadmin" ] ; then
-  CLASS=org.apache.hadoop.mapreduce.v2.hs.client.HSAdmin
-  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_CLIENT_OPTS"
-else
-  echo $COMMAND - invalid command
-  print_usage
-  exit 1
-fi
+daemon_outfile="${HADOOP_LOG_DIR}/hadoop-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.out"
+daemon_pidfile="${HADOOP_PID_DIR}/hadoop-${HADOOP_IDENT_STRING}-${COMMAND}.pid"
 
-# for developers, add mapred classes to CLASSPATH
-if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
-fi
-if [ -d "$HADOOP_MAPRED_HOME/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build
-fi
-if [ -d "$HADOOP_MAPRED_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/test/classes
-fi
-if [ -d "$HADOOP_MAPRED_HOME/build/tools" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
-fi
 
-# for releases, add core mapred jar & webapps to CLASSPATH
-if [ -d "$HADOOP_PREFIX/${MAPRED_DIR}/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/${MAPRED_DIR}
-fi
-for f in $HADOOP_MAPRED_HOME/${MAPRED_DIR}/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# Need YARN jars also
-for f in $HADOOP_YARN_HOME/${YARN_DIR}/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add libs to CLASSPATH
-for f in $HADOOP_MAPRED_HOME/${MAPRED_LIB_JARS_DIR}/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-# add modules to CLASSPATH
-for f in $HADOOP_MAPRED_HOME/modules/*.jar; do
-  CLASSPATH=${CLASSPATH}:$f;
-done
-
-if [ "$COMMAND" = "classpath" ] ; then
-  echo $CLASSPATH
-  exit
+if [[  "${HADOOP_DAEMON_MODE}" != "default" ]]; then
+  # shellcheck disable=SC2034
+  HADOOP_ROOT_LOGGER="${HADOOP_DAEMON_ROOT_LOGGER}"
+  hadoop_add_param HADOOP_OPTS mapred.jobsummary.logger "-Dmapred.jobsummary.logger=${HADOOP_ROOT_LOGGER}"
+  # shellcheck disable=SC2034
+  HADOOP_LOGFILE="hadoop-${HADOOP_IDENT_STRING}-${COMMAND}-${HOSTNAME}.log"
 fi
 
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,NullAppender}"
+hadoop_add_param HADOOP_OPTS Xmx "${JAVA_HEAP_MAX}"
+hadoop_finalize
 
 export CLASSPATH
-exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS $CLASS "$@"
+
+if [[ -n "${daemon}" ]]; then
+  if [[ -n "${secure_service}" ]]; then
+    hadoop_secure_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}"\
+    "${CLASS}" "${daemon_pidfile}" "${daemon_outfile}" \
+    "${priv_pidfile}" "${priv_outfile}" "${priv_errfile}" "$@"
+  else
+    hadoop_daemon_handler "${HADOOP_DAEMON_MODE}" "${COMMAND}" "${CLASS}" \
+    "${daemon_pidfile}" "${daemon_outfile}" "$@"
+  fi
+  exit $?
+else
+  hadoop_java_exec "${COMMAND}" "${CLASS}" "$@"
+fi
+
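
For anyone trying out the rewritten entry point, invocation now looks roughly like this (a sketch based on the usage string above; the conf dir path is illustrative, and --config/--daemon are handled by the common scripts pulled in via mapred-config.sh, which remain the authoritative reference):

    # client-style commands still run in the foreground
    bin/mapred job -list
    bin/mapred queue -list

    # daemons are started/stopped/queried through the generic --daemon flag
    bin/mapred --config /etc/hadoop/conf --daemon start historyserver
    bin/mapred --config /etc/hadoop/conf --daemon status historyserver
    bin/mapred --config /etc/hadoop/conf --daemon stop historyserver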

Modified: hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred-config.sh?rev=1618847&r1=1618846&r2=1618847&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred-config.sh (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/bin/mapred-config.sh Tue Aug 19 12:11:17 2014
@@ -18,35 +18,55 @@
 # included in all the mapred scripts with source command
 # should not be executed directly
 
-bin=`which "$0"`
-bin=`dirname "${bin}"`
-bin=`cd "$bin"; pwd`
+function hadoop_subproject_init
+{
+  if [ -e "${HADOOP_CONF_DIR}/mapred-env.sh" ]; then
+    . "${HADOOP_CONF_DIR}/mapred-env.sh"
+  fi
+  
+  # at some point in time, someone thought it would be a good idea to
+  # create separate vars for every subproject.  *sigh*
+  # let's perform some overrides and setup some defaults for bw compat
+  # this way the common hadoop vars == subproject vars and can be
+  # used interchangeably from here on out
+  # ...
+  # this should get deprecated at some point.
+  HADOOP_LOG_DIR="${HADOOP_MAPRED_LOG_DIR:-$HADOOP_LOG_DIR}"
+  HADOOP_MAPRED_LOG_DIR="${HADOOP_LOG_DIR}"
+  
+  HADOOP_LOGFILE="${HADOOP_MAPRED_LOGFILE:-$HADOOP_LOGFILE}"
+  HADOOP_MAPRED_LOGFILE="${HADOOP_LOGFILE}"
+  
+  HADOOP_NICENESS="${HADOOP_MAPRED_NICENESS:-$HADOOP_NICENESS}"
+  HADOOP_MAPRED_NICENESS="${HADOOP_NICENESS}"
+  
+  HADOOP_STOP_TIMEOUT="${HADOOP_MAPRED_STOP_TIMEOUT:-$HADOOP_STOP_TIMEOUT}"
+  HADOOP_MAPRED_STOP_TIMEOUT="${HADOOP_STOP_TIMEOUT}"
+  
+  HADOOP_PID_DIR="${HADOOP_MAPRED_PID_DIR:-$HADOOP_PID_DIR}"
+  HADOOP_MAPRED_PID_DIR="${HADOOP_PID_DIR}"
+  
+  HADOOP_ROOT_LOGGER="${HADOOP_MAPRED_ROOT_LOGGER:-INFO,console}"
+  HADOOP_MAPRED_ROOT_LOGGER="${HADOOP_ROOT_LOGGER}"
+  
+  HADOOP_MAPRED_HOME="${HADOOP_MAPRED_HOME:-$HADOOP_HOME_DIR}"
+  
+  HADOOP_IDENT_STRING="${HADOOP_MAPRED_IDENT_STRING:-$HADOOP_IDENT_STRING}"
+  HADOOP_MAPRED_IDENT_STRING="${HADOOP_IDENT_STRING}"
+}
+
+if [[ -z "${HADOOP_LIBEXEC_DIR}" ]]; then
+  _mc_this="${BASH_SOURCE-$0}"
+  HADOOP_LIBEXEC_DIR=$(cd -P -- "$(dirname -- "${_mc_this}")" >/dev/null && pwd -P)
+fi
 
-DEFAULT_LIBEXEC_DIR="$bin"/../libexec
-HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-if [ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]; then
+if [[ -e "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh" ]]; then
   . "${HADOOP_LIBEXEC_DIR}/hadoop-config.sh"
-elif [ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]; then
-  . "$HADOOP_COMMON_HOME"/libexec/hadoop-config.sh
-elif [ -e "${HADOOP_COMMON_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_COMMON_HOME"/bin/hadoop-config.sh
-elif [ -e "${HADOOP_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_HOME"/bin/hadoop-config.sh
-elif [ -e "${HADOOP_MAPRED_HOME}/bin/hadoop-config.sh" ]; then
-  . "$HADOOP_MAPRED_HOME"/bin/hadoop-config.sh
+elif [[ -e "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh" ]]; then
+  . "${HADOOP_COMMON_HOME}/libexec/hadoop-config.sh"
+elif [[ -e "${HADOOP_HOME}/libexec/hadoop-config.sh" ]]; then
+  . "${HADOOP_HOME}/libexec/hadoop-config.sh"
 else
   echo "Hadoop common not found."
   exit
 fi
-
-# Only set locally to use in HADOOP_OPTS. No need to export.
-# The following defaults are useful when somebody directly invokes bin/mapred.
-HADOOP_MAPRED_LOG_DIR=${HADOOP_MAPRED_LOG_DIR:-${HADOOP_MAPRED_HOME}/logs}
-HADOOP_MAPRED_LOGFILE=${HADOOP_MAPRED_LOGFILE:-hadoop.log}
-HADOOP_MAPRED_ROOT_LOGGER=${HADOOP_MAPRED_ROOT_LOGGER:-INFO,console}
-
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_MAPRED_LOG_DIR"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_MAPRED_LOGFILE"
-export HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_MAPRED_ROOT_LOGGER}"
-
-
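
The back-compat mapping in hadoop_subproject_init above is the usual ${var:-fallback} precedence pattern; as a standalone illustration (not part of the commit, values invented for the example), it behaves like this:

    # If the MapReduce-specific variable is set it wins; otherwise the generic
    # Hadoop value is kept. Afterwards both names refer to the same value.
    HADOOP_LOG_DIR="/var/log/hadoop"
    HADOOP_MAPRED_LOG_DIR="/var/log/hadoop-mapreduce"

    HADOOP_LOG_DIR="${HADOOP_MAPRED_LOG_DIR:-$HADOOP_LOG_DIR}"
    HADOOP_MAPRED_LOG_DIR="${HADOOP_LOG_DIR}"

    echo "${HADOOP_LOG_DIR}"         # -> /var/log/hadoop-mapreduce
    echo "${HADOOP_MAPRED_LOG_DIR}"  # -> /var/log/hadoop-mapreduce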

Modified: hadoop/common/trunk/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh?rev=1618847&r1=1618846&r2=1618847&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/bin/mr-jobhistory-daemon.sh Tue Aug 19 12:11:17 2014
@@ -15,133 +15,32 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-
-#
-# Environment Variables
-#
-#   HADOOP_JHS_LOGGER  Hadoop JobSummary logger.
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_MAPRED_HOME}/conf.
-#   HADOOP_MAPRED_PID_DIR   The pid files are stored. /tmp by default.
-#   HADOOP_MAPRED_NICENESS The scheduling priority for daemons. Defaults to 0.
-##
-
-usage="Usage: mr-jobhistory-daemon.sh [--config <conf-dir>] (start|stop) <mapred-command> "
-
-# if no args specified, show usage
-if [ $# -le 1 ]; then
-  echo $usage
-  exit 1
-fi
-
-bin=`dirname "${BASH_SOURCE-$0}"`
-bin=`cd "$bin"; pwd`
-
-DEFAULT_LIBEXEC_DIR="$bin"/../libexec
-HADOOP_LIBEXEC_DIR=${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}
-if [ -e ${HADOOP_LIBEXEC_DIR}/mapred-config.sh ]; then
-  . $HADOOP_LIBEXEC_DIR/mapred-config.sh
-fi
-
-# get arguments
-startStop=$1
-shift
-command=$1
-shift
-
-hadoop_rotate_log ()
+function hadoop_usage
 {
-  log=$1;
-  num=5;
-  if [ -n "$2" ]; then
-    num=$2
-  fi
-  if [ -f "$log" ]; then # rotate logs
-    while [ $num -gt 1 ]; do
-      prev=`expr $num - 1`
-      [ -f "$log.$prev" ] && mv "$log.$prev" "$log.$num"
-      num=$prev
-    done
-    mv "$log" "$log.$num";
-  fi
+  echo "Usage: mr-jobhistory-daemon.sh [--config confdir] (start|stop|status) <hadoop-command> <args...>"
 }
 
-if [ "$HADOOP_MAPRED_IDENT_STRING" = "" ]; then
-  export HADOOP_MAPRED_IDENT_STRING="$USER"
-fi
-
-export HADOOP_MAPRED_HOME=${HADOOP_MAPRED_HOME:-${HADOOP_PREFIX}}
-export HADOOP_MAPRED_LOGFILE=mapred-$HADOOP_MAPRED_IDENT_STRING-$command-$HOSTNAME.log
-export HADOOP_MAPRED_ROOT_LOGGER=${HADOOP_MAPRED_ROOT_LOGGER:-INFO,RFA}
-export HADOOP_JHS_LOGGER=${HADOOP_JHS_LOGGER:-INFO,JSA}
-
-if [ -f "${HADOOP_CONF_DIR}/mapred-env.sh" ]; then
-  . "${HADOOP_CONF_DIR}/mapred-env.sh"
-fi
-
-mkdir -p "$HADOOP_MAPRED_LOG_DIR"
-chown $HADOOP_MAPRED_IDENT_STRING $HADOOP_MAPRED_LOG_DIR
-
-if [ "$HADOOP_MAPRED_PID_DIR" = "" ]; then
-  HADOOP_MAPRED_PID_DIR=/tmp
-fi
-
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_MAPRED_IDENT_STRING"
-
-log=$HADOOP_MAPRED_LOG_DIR/mapred-$HADOOP_MAPRED_IDENT_STRING-$command-$HOSTNAME.out
-pid=$HADOOP_MAPRED_PID_DIR/mapred-$HADOOP_MAPRED_IDENT_STRING-$command.pid
-
-HADOOP_MAPRED_STOP_TIMEOUT=${HADOOP_MAPRED_STOP_TIMEOUT:-5}
-
-# Set default scheduling priority
-if [ "$HADOOP_MAPRED_NICENESS" = "" ]; then
-  export HADOOP_MAPRED_NICENESS=0
+# let's locate libexec...
+if [[ -n "${HADOOP_PREFIX}" ]]; then
+  DEFAULT_LIBEXEC_DIR="${HADOOP_PREFIX}/libexec"
+else
+  this="${BASH_SOURCE-$0}"
+  bin=$(cd -P -- "$(dirname -- "${this}")" >/dev/null && pwd -P)
+  DEFAULT_LIBEXEC_DIR="${bin}/../libexec"
+fi
+
+HADOOP_LIBEXEC_DIR="${HADOOP_LIBEXEC_DIR:-$DEFAULT_LIBEXEC_DIR}"
+# shellcheck disable=SC2034
+HADOOP_NEW_CONFIG=true
+if [[ -f "${HADOOP_LIBEXEC_DIR}/yarn-config.sh" ]]; then
+  . "${HADOOP_LIBEXEC_DIR}/yarn-config.sh"
+else
+  echo "ERROR: Cannot execute ${HADOOP_LIBEXEC_DIR}/yarn-config.sh." 1>&2
+  exit 1
 fi
 
-case $startStop in
-
-  (start)
-
-    mkdir -p "$HADOOP_MAPRED_PID_DIR"
-
-    if [ -f $pid ]; then
-      if kill -0 `cat $pid` > /dev/null 2>&1; then
-        echo $command running as process `cat $pid`.  Stop it first.
-        exit 1
-      fi
-    fi
-
-    hadoop_rotate_log $log
-    echo starting $command, logging to $log
-    cd "$HADOOP_MAPRED_HOME"
-    nohup nice -n $HADOOP_MAPRED_NICENESS "$HADOOP_MAPRED_HOME"/bin/mapred --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
-    echo $! > $pid
-    sleep 1; head "$log"
-    ;;
-
-  (stop)
-
-    if [ -f $pid ]; then
-      TARGET_PID=`cat $pid`
-      if kill -0 $TARGET_PID > /dev/null 2>&1; then
-        echo stopping $command
-        kill $TARGET_PID
-        sleep $HADOOP_MAPRED_STOP_TIMEOUT
-        if kill -0 $TARGET_PID > /dev/null 2>&1; then
-          echo "$command did not stop gracefully after $HADOOP_MAPRED_STOP_TIMEOUT seconds: killing with kill -9"
-          kill -9 $TARGET_PID
-        fi
-      else
-        echo no $command to stop
-      fi
-      rm -f $pid
-    else
-      echo no $command to stop
-    fi
-    ;;
-
-  (*)
-    echo $usage
-    exit 1
-    ;;
+daemonmode=$1
+shift
 
-esac
+exec "${HADOOP_MAPRED_HOME}/bin/mapred" \
+--config "${HADOOP_CONF_DIR}" --daemon "${daemonmode}" "$@"
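
With this change mr-jobhistory-daemon.sh is just a compatibility shim, so the two invocations below should end up doing the same work (assuming HADOOP_MAPRED_HOME and HADOOP_CONF_DIR are resolved by the sourced config scripts):

    # legacy wrapper, kept for backward compatibility
    bin/mr-jobhistory-daemon.sh start historyserver

    # what it now execs under the hood, roughly
    bin/mapred --config "${HADOOP_CONF_DIR}" --daemon start historyserver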

Modified: hadoop/common/trunk/hadoop-mapreduce-project/conf/mapred-env.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/conf/mapred-env.sh?rev=1618847&r1=1618846&r2=1618847&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/conf/mapred-env.sh (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/conf/mapred-env.sh Tue Aug 19 12:11:17 2014
@@ -13,15 +13,59 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-# export JAVA_HOME=/home/y/libexec/jdk1.6.0/
+##
+## THIS FILE ACTS AS AN OVERRIDE FOR hadoop-env.sh FOR ALL
+## WORK DONE BY THE mapred AND RELATED COMMANDS.
+##
+## Precedence rules:
+##
+## mapred-env.sh > hadoop-env.sh > hard-coded defaults
+##
+## MAPRED_xyz > HADOOP_xyz > hard-coded defaults
+##
 
-export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+###
+# Generic settings for MapReduce
+###
 
-export HADOOP_MAPRED_ROOT_LOGGER=INFO,RFA
+# Override the log4j settings for all MR apps
+# export HADOOP_MAPRED_ROOT_LOGGER="INFO,console"
 
+# Override Hadoop's log directory & file
+# export HADOOP_MAPRED_LOG_DIR=""
+
+# Override Hadoop's pid directory
+# export HADOOP_MAPRED_PID_DIR=
+
+# Override Hadoop's identity string. $USER by default.
+# This is used in writing log and pid files, so keep that in mind!
+# export HADOOP_MAPRED_IDENT_STRING=$USER
+
+# Override Hadoop's process priority
+# Note that sub-processes will also run at this level!
+# export HADOOP_MAPRED_NICENESS=0
+
+###
+# Job History Server specific parameters
+###
+
+# Specify the max heapsize for the Job History Server using a numerical value
+# in the scale of MB. For example, to specify a JVM option of -Xmx1000m, set
+# the value to 1000.
+# This value will be overridden by an Xmx setting specified in
+# MAPRED_OPTS, HADOOP_OPTS, and/or HADOOP_JOB_HISTORYSERVER_OPTS.
+# If not specified, the default value will be picked from either YARN_HEAPMAX
+# or JAVA_HEAP_MAX with YARN_HEAPMAX as the preferred option of the two.
+#
+#export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=1000
+
+# Specify the JVM options to be used when starting the Job History Server.
+# These options will be appended to the options specified as HADOOP_OPTS
+# and therefore may override any similar flags set in HADOOP_OPTS
 #export HADOOP_JOB_HISTORYSERVER_OPTS=
-#export HADOOP_MAPRED_LOG_DIR="" # Where log files are stored.  $HADOOP_MAPRED_HOME/logs by default.
-#export HADOOP_JHS_LOGGER=INFO,RFA # Hadoop JobSummary logger.
-#export HADOOP_MAPRED_PID_DIR= # The pid files are stored. /tmp by default.
-#export HADOOP_MAPRED_IDENT_STRING= #A string representing this instance of hadoop. $USER by default
-#export HADOOP_MAPRED_NICENESS= #The scheduling priority for daemons. Defaults to 0.
+
+# Specify the log4j settings for the JobHistoryServer
+#export HADOOP_JHS_LOGGER=INFO,RFA
+
+
+
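
As a concrete illustration of the precedence rules documented above (values chosen purely as an example), a site-local conf/mapred-env.sh might contain nothing more than:

    # Give the Job History Server a bigger heap and keep MapReduce logs
    # separate from the rest of Hadoop; settings here win over hadoop-env.sh.
    export HADOOP_JOB_HISTORYSERVER_HEAPSIZE=2000
    export HADOOP_MAPRED_LOG_DIR=/var/log/hadoop-mapreduce
    export HADOOP_JHS_LOGGER=INFO,RFA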