You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/07/24 11:17:33 UTC

svn commit: r424973 - in /lucene/hadoop/trunk: ./ bin/

Author: cutting
Date: Mon Jul 24 02:17:31 2006
New Revision: 424973

URL: http://svn.apache.org/viewvc?rev=424973&view=rev
Log:
HADOOP-260.  Add --config option to shell scripts.  Contributed by Milind.

Added:
    lucene/hadoop/trunk/bin/hadoop-config.sh
Modified:
    lucene/hadoop/trunk/CHANGES.txt
    lucene/hadoop/trunk/bin/hadoop
    lucene/hadoop/trunk/bin/hadoop-daemon.sh
    lucene/hadoop/trunk/bin/hadoop-daemons.sh
    lucene/hadoop/trunk/bin/rcc
    lucene/hadoop/trunk/bin/slaves.sh
    lucene/hadoop/trunk/bin/start-all.sh
    lucene/hadoop/trunk/bin/start-dfs.sh
    lucene/hadoop/trunk/bin/start-mapred.sh
    lucene/hadoop/trunk/bin/stop-all.sh
    lucene/hadoop/trunk/bin/stop-dfs.sh
    lucene/hadoop/trunk/bin/stop-mapred.sh

Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/CHANGES.txt?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Mon Jul 24 02:17:31 2006
@@ -66,6 +66,9 @@
 18. HADOOP-376.  Fix datanode's HTTP server to scan for a free port.
     (omalley via cutting)
 
+19. HADOOP-260.  Add --config option to shell scripts, specifying an
+    alternate configuration directory. (Milind Bhandarkar via cutting)
+
 
 Release 0.4.0 - 2006-06-28
 

Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/hadoop?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Mon Jul 24 02:17:31 2006
@@ -16,21 +16,14 @@
 #   HADOOP_ROOT_LOGGER The root appender. Default is INFO,console
 #
 
-# resolve links - $0 may be a softlink
-THIS="$0"
-while [ -h "$THIS" ]; do
-  ls=`ls -ld "$THIS"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    THIS="$link"
-  else
-    THIS=`dirname "$THIS"`/"$link"
-  fi
-done
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+source "$bin"/hadoop-config.sh
 
 # if no args specified, show usage
 if [ $# = 0 ]; then
-  echo "Usage: hadoop COMMAND"
+  echo "Usage: hadoop [--config confdir] COMMAND"
   echo "where COMMAND is one of:"
   echo "  namenode -format  format the DFS filesystem"
   echo "  namenode          run the DFS namenode"
@@ -51,13 +44,6 @@
 # get arguments
 COMMAND=$1
 shift
-
-# some directories
-THIS_DIR=`dirname "$THIS"`
-export HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

Added: lucene/hadoop/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/hadoop-config.sh?rev=424973&view=auto
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-config.sh (added)
+++ lucene/hadoop/trunk/bin/hadoop-config.sh Mon Jul 24 02:17:31 2006
@@ -0,0 +1,33 @@
+# included in all the hadoop scripts with source command
+# should not be executable directly
+# also should not be passed any arguments, since we need original $*
+
+# resolve links - $0 may be a softlink
+this="$0"
+while [ -h "$this" ]; do
+  ls=`ls -ld "$this"`
+  link=`expr "$ls" : '.*-> \(.*\)$'`
+  if expr "$link" : '.*/.*' > /dev/null; then
+    this="$link"
+  else
+    this=`dirname "$this"`/"$link"
+  fi
+done
+
+# the root of the Hadoop installation
+export HADOOP_HOME=`dirname "$this"`/..
+
+#check to see if the conf dir is given as an optional argument
+if [ $# -gt 1 ]
+then
+    if [ "--config" = "$1" ]
+	  then
+	      shift
+	      confdir=$1
+	      shift
+	      HADOOP_CONF_DIR=$confdir
+    fi
+fi
+ 
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"

Modified: lucene/hadoop/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/hadoop-daemon.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemon.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemon.sh Mon Jul 24 02:17:31 2006
@@ -11,7 +11,7 @@
 #   HADOOP_IDENT_STRING   A string representing this instance of hadoop. $USER by default
 ##
 
-usage="Usage: hadoop-daemon [start|stop] [hadoop-command] [args...]"
+usage="Usage: hadoop-daemon.sh [--config <conf-dir>] (start|stop) <hadoop-command> <args...>"
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -19,30 +19,17 @@
   exit 1
 fi
 
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+source "$bin"/hadoop-config.sh
+
 # get arguments
 startStop=$1
 shift
 command=$1
 shift
 
-# resolve links - $0 may be a softlink
-this="$0"
-while [ -h "$this" ]; do
-  ls=`ls -ld "$this"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    this="$link"
-  else
-    this=`dirname "$this"`/"$link"
-  fi
-done
-
-# the root of the Hadoop installation
-export HADOOP_HOME=`dirname "$this"`/..
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
-
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
@@ -84,7 +71,7 @@
     fi
 
     echo starting $command, logging to $log
-    nohup "$HADOOP_HOME"/bin/hadoop $command "$@" > "$log" 2>&1 < /dev/null &
+    nohup "$HADOOP_HOME"/bin/hadoop --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"
     ;;

Modified: lucene/hadoop/trunk/bin/hadoop-daemons.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/hadoop-daemons.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemons.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemons.sh Mon Jul 24 02:17:31 2006
@@ -2,7 +2,7 @@
 # 
 # Run a Hadoop command on all slave hosts.
 
-usage="Usage: hadoop-daemons.sh [start|stop] command args..."
+usage="Usage: hadoop-daemons.sh [--config confdir] [start|stop] command args..."
 
 # if no args specified, show usage
 if [ $# -le 1 ]; then
@@ -13,6 +13,6 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-HADOOP_HOME="$bin/.."
+source $bin/hadoop-config.sh
 
-exec "$bin/slaves.sh" cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" "$@"
+exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"

Modified: lucene/hadoop/trunk/bin/rcc
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/rcc?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/rcc (original)
+++ lucene/hadoop/trunk/bin/rcc Mon Jul 24 02:17:31 2006
@@ -11,24 +11,10 @@
 #   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #
 
-# resolve links - $0 may be a softlink
-THIS="$0"
-while [ -h "$THIS" ]; do
-  ls=`ls -ld "$THIS"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    THIS="$link"
-  else
-    THIS=`dirname "$THIS"`/"$link"
-  fi
-done
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-# some directories
-THIS_DIR=`dirname "$THIS"`
-HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+source "$bin"/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

Modified: lucene/hadoop/trunk/bin/slaves.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/slaves.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/slaves.sh (original)
+++ lucene/hadoop/trunk/bin/slaves.sh Mon Jul 24 02:17:31 2006
@@ -11,7 +11,7 @@
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
 
-usage="Usage: slaves.sh command..."
+usage="Usage: slaves.sh [--config confdir] command..."
 
 # if no args specified, show usage
 if [ $# -le 0 ]; then
@@ -19,23 +19,10 @@
   exit 1
 fi
 
-# resolve links - $0 may be a softlink
-this="$0"
-while [ -h "$this" ]; do
-  ls=`ls -ld "$this"`
-  link=`expr "$ls" : '.*-> \(.*\)$'`
-  if expr "$link" : '.*/.*' > /dev/null; then
-    this="$link"
-  else
-    this=`dirname "$this"`/"$link"
-  fi
-done
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-# the root of the Hadoop installation
-HADOOP_HOME=`dirname "$this"`/..
-
-# Allow alternate conf dir location.
-HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
+source "$bin"/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

Modified: lucene/hadoop/trunk/bin/start-all.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/start-all.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/start-all.sh (original)
+++ lucene/hadoop/trunk/bin/start-all.sh Mon Jul 24 02:17:31 2006
@@ -5,8 +5,10 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start dfs daemons
-"$bin"/start-dfs.sh
+"$bin"/start-dfs.sh --config $HADOOP_CONF_DIR
 
 # start mapred daemons
-"$bin"/start-mapred.sh
+"$bin"/start-mapred.sh --config $HADOOP_CONF_DIR

Modified: lucene/hadoop/trunk/bin/start-dfs.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/start-dfs.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/start-dfs.sh (original)
+++ lucene/hadoop/trunk/bin/start-dfs.sh Mon Jul 24 02:17:31 2006
@@ -5,8 +5,10 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start dfs daemons
 # start namenode after datanodes, to minimize time namenode is up w/o data
 # note: datanodes will log connection errors until namenode starts
-"$bin"/hadoop-daemon.sh start namenode
-"$bin"/hadoop-daemons.sh start datanode
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start namenode
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start datanode

Modified: lucene/hadoop/trunk/bin/start-mapred.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/start-mapred.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/start-mapred.sh (original)
+++ lucene/hadoop/trunk/bin/start-mapred.sh Mon Jul 24 02:17:31 2006
@@ -5,7 +5,9 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
+source "$bin"/hadoop-config.sh
+
 # start mapred daemons
 # start jobtracker first to minimize connection errors at startup
-"$bin"/hadoop-daemon.sh start jobtracker
-"$bin"/hadoop-daemons.sh start tasktracker
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start jobtracker
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR start tasktracker

Modified: lucene/hadoop/trunk/bin/stop-all.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/stop-all.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/stop-all.sh (original)
+++ lucene/hadoop/trunk/bin/stop-all.sh Mon Jul 24 02:17:31 2006
@@ -5,5 +5,7 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/stop-mapred.sh
-"$bin"/stop-dfs.sh
+source "$bin"/hadoop-config.sh
+
+"$bin"/stop-mapred.sh --config $HADOOP_CONF_DIR
+"$bin"/stop-dfs.sh --config $HADOOP_CONF_DIR

Modified: lucene/hadoop/trunk/bin/stop-dfs.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/stop-dfs.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/stop-dfs.sh (original)
+++ lucene/hadoop/trunk/bin/stop-dfs.sh Mon Jul 24 02:17:31 2006
@@ -5,6 +5,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/hadoop-daemon.sh stop namenode
-"$bin"/hadoop-daemons.sh stop datanode
+source "$bin"/hadoop-config.sh
+
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop namenode
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop datanode
 

Modified: lucene/hadoop/trunk/bin/stop-mapred.sh
URL: http://svn.apache.org/viewvc/lucene/hadoop/trunk/bin/stop-mapred.sh?rev=424973&r1=424972&r2=424973&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/stop-mapred.sh (original)
+++ lucene/hadoop/trunk/bin/stop-mapred.sh Mon Jul 24 02:17:31 2006
@@ -5,6 +5,8 @@
 bin=`dirname "$0"`
 bin=`cd "$bin"; pwd`
 
-"$bin"/hadoop-daemon.sh stop jobtracker
-"$bin"/hadoop-daemons.sh stop tasktracker
+source "$bin"/hadoop-config.sh
+
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop jobtracker
+"$bin"/hadoop-daemons.sh --config $HADOOP_CONF_DIR stop tasktracker