Posted to common-commits@hadoop.apache.org by su...@apache.org on 2011/05/03 23:19:26 UTC

svn commit: r1099262 - in /hadoop/common/trunk: CHANGES.txt bin/hadoop-config.sh bin/slaves.sh

Author: suresh
Date: Tue May  3 21:19:25 2011
New Revision: 1099262

URL: http://svn.apache.org/viewvc?rev=1099262&view=rev
Log:
HADOOP-7179. Federation: Improve HDFS startup scripts. Contributed by Erik Steffl and Tanping Wang.
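
For context, the --hosts and the new --hostnames options are parsed by hadoop-config.sh,
which slaves.sh sources, so they can be passed straight to slaves.sh. A rough usage
sketch (the host names, the datanode_hosts file, and the uptime command below are
placeholders for illustration only):

    # Run a command on an explicit, space-separated list of hosts (new --hostnames option)
    bin/slaves.sh --hostnames "dn1.example.com dn2.example.com" uptime

    # Run a command on the hosts listed in a file under ${HADOOP_CONF_DIR} (--hosts option)
    bin/slaves.sh --hosts datanode_hosts uptime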


Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/bin/hadoop-config.sh
    hadoop/common/trunk/bin/slaves.sh

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1099262&r1=1099261&r2=1099262&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Tue May  3 21:19:25 2011
@@ -102,6 +102,9 @@ Trunk (unreleased changes)
     HADOOP-7235. Refactor the tail command to conform to new FsCommand class.
     (Daryn Sharp via szetszwo)
 
+    HADOOP-7179. Federation: Improve HDFS startup scripts. (Erik Steffl
+    and Tanping Wang via suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES

Modified: hadoop/common/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop-config.sh?rev=1099262&r1=1099261&r2=1099262&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop-config.sh (original)
+++ hadoop/common/trunk/bin/hadoop-config.sh Tue May  3 21:19:25 2011
@@ -47,19 +47,40 @@ fi
 # Allow alternate conf dir location.
 export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
 
-#check to see it is specified whether to use the slaves or the
-# masters file
+# The user can specify host names or a file that lists them, but not both
+if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
+  echo \
+    "Error: Please specify one variable HADOOP_SLAVES or " \
+    "HADOOP_SLAVE_NAME and not both."
+  exit 1
+fi
+
+# Process command line options that specify hosts or file with host
+# list
 if [ $# -gt 1 ]
 then
     if [ "--hosts" = "$1" ]
     then
         shift
-        slavesfile=$1
+        export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$1"
+        shift
+    elif [ "--hostnames" = "$1" ]
+    then
+        shift
+        export HADOOP_SLAVE_NAMES=$1
         shift
-        export HADOOP_SLAVES="${HADOOP_CONF_DIR}/$slavesfile"
     fi
 fi
 
+# The user can specify host names or a file that lists them, but not both.
+# (Same check as above, but at this point any conflict must have come from
+# the command-line options.)
+if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
+  echo \
+    "Error: Please specify one of --hosts or --hostnames options and not both."
+  exit 1
+fi
+
 cygwin=false
 case "`uname`" in
 CYGWIN*) cygwin=true;;
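
The same pair of variables can also be set directly in the environment; they stay
mutually exclusive. A minimal sketch, assuming a hypothetical datanode_hosts file
and host names:

    # Point the scripts at a hosts file (one host per line; slaves.sh strips
    # "#" comments and blank lines) ...
    export HADOOP_SLAVES="${HADOOP_CONF_DIR}/datanode_hosts"

    # ... or list the host names directly, space separated. Exporting both
    # variables makes hadoop-config.sh exit with the first error above.
    export HADOOP_SLAVE_NAMES="dn1.example.com dn2.example.com"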

Modified: hadoop/common/trunk/bin/slaves.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/slaves.sh?rev=1099262&r1=1099261&r2=1099262&view=diff
==============================================================================
--- hadoop/common/trunk/bin/slaves.sh (original)
+++ hadoop/common/trunk/bin/slaves.sh Tue May  3 21:19:25 2011
@@ -40,24 +40,21 @@ bin=`cd "$bin"; pwd`
 
 . "$bin"/hadoop-config.sh
 
-# If the slaves file is specified in the command line,
-# then it takes precedence over the definition in 
-# hadoop-env.sh. Save it here.
-HOSTLIST=$HADOOP_SLAVES
-
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
-if [ "$HOSTLIST" = "" ]; then
-  if [ "$HADOOP_SLAVES" = "" ]; then
-    export HOSTLIST="${HADOOP_CONF_DIR}/slaves"
-  else
-    export HOSTLIST="${HADOOP_SLAVES}"
-  fi
+# Determine which hosts to run on; see hadoop-config.sh
+# (it sets these variables based on the command-line options)
+if [ "$HADOOP_SLAVE_NAMES" != '' ] ; then
+  SLAVE_NAMES=$HADOOP_SLAVE_NAMES
+else
+  SLAVE_FILE=${HADOOP_SLAVES:-${HADOOP_CONF_DIR}/slaves}
+  SLAVE_NAMES=$(cat "$SLAVE_FILE" | sed  's/#.*$//;/^$/d')
 fi
 
-for slave in `cat "$HOSTLIST"|sed  "s/#.*$//;/^$/d"`; do
+# run the given command on each slave host
+for slave in $SLAVE_NAMES ; do
  ssh $HADOOP_SSH_OPTS $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
  if [ "$HADOOP_SLAVE_SLEEP" != "" ]; then