Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/03/02 19:48:54 UTC

svn commit: r382465 - in /lucene/hadoop/trunk: bin/hadoop bin/hadoop-daemon.sh bin/slaves.sh conf/hadoop-env.sh.template

Author: cutting
Date: Thu Mar  2 10:48:52 2006
New Revision: 382465

URL: http://svn.apache.org/viewcvs?rev=382465&view=rev
Log:
Fix for HADOOP-60, with help from Owen & Michael.

Modified:
    lucene/hadoop/trunk/bin/hadoop
    lucene/hadoop/trunk/bin/hadoop-daemon.sh
    lucene/hadoop/trunk/bin/slaves.sh
    lucene/hadoop/trunk/conf/hadoop-env.sh.template
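
The net effect of the change: each of these scripts now honors an optional
HADOOP_CONF_DIR environment variable instead of hard-coding
${HADOOP_HOME}/conf. A minimal usage sketch (the /etc/hadoop-conf path below
is only an illustrative example, not part of this commit):

  # point the scripts at a hypothetical alternate configuration directory
  export HADOOP_CONF_DIR=/etc/hadoop-conf
  # bin/hadoop and the daemon scripts will now source
  # /etc/hadoop-conf/hadoop-env.sh and put that directory on the CLASSPATH
  bin/hadoop namenode -format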

Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop?rev=382465&r1=382464&r2=382465&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Thu Mar  2 10:48:52 2006
@@ -4,13 +4,15 @@
 #
 # Environment Variables
 #
-#   JAVA_HOME The java implementation to use.  Overrides JAVA_HOME.
+#   JAVA_HOME        The java implementation to use.  Overrides JAVA_HOME.
 #
 #   HADOOP_HEAPSIZE  The maximum amount of heap to use, in MB. 
-#                   Default is 1000.
+#                    Default is 1000.
 #
 #   HADOOP_OPTS      Extra Java runtime options.
 #
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#
 
 # resolve links - $0 may be a softlink
 THIS="$0"
@@ -49,8 +51,11 @@
 THIS_DIR=`dirname "$THIS"`
 HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
 +  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
 # some Java parameters
@@ -74,8 +79,8 @@
   #echo $JAVA_HEAP_MAX
 fi
 
-# CLASSPATH initially contains $HADOOP_CONF_DIR, or defaults to $HADOOP_HOME/conf
-CLASSPATH=${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}
+# CLASSPATH initially contains $HADOOP_CONF_DIR
+CLASSPATH="${HADOOP_CONF_DIR}"
 CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
 
 # for developers, add Hadoop classes to CLASSPATH
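
The ${HADOOP_CONF_DIR:-$HADOOP_HOME/conf} expansion above falls back to the
bundled conf directory only when the variable is unset or empty, so a
caller-supplied value always wins. A standalone sketch of the two related
expansion forms (plain POSIX shell, not Hadoop-specific):

  # ${VAR:-default} substitutes the default without assigning it;
  # ${VAR:=default} substitutes the default and also assigns it to VAR.
  HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
  echo "reading configuration from: ${HADOOP_CONF_DIR}"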

Modified: lucene/hadoop/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop-daemon.sh?rev=382465&r1=382464&r2=382465&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemon.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemon.sh Thu Mar  2 10:48:52 2006
@@ -4,6 +4,7 @@
 #
 # Environment Variables
 #
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 #   HADOOP_LOG_DIR   Where log files are stored.  PWD by default.
 #   HADOOP_MASTER    host:path where hadoop code should be rsync'd from
 #   HADOOP_PID_DIR   The pid files are stored. /tmp by default.
@@ -39,8 +40,11 @@
 # the root of the Hadoop installation
 HADOOP_HOME=`dirname "$this"`/..
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
 # get log directory
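
With the same fallback in hadoop-daemon.sh, a daemon can be pointed at a
non-default configuration directory per invocation. A sketch (the path and
the choice of the datanode command are illustrative only):

  # start a datanode that reads its config from a hypothetical directory
  HADOOP_CONF_DIR=/srv/hadoop/conf-alt bin/hadoop-daemon.sh start datanode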

Modified: lucene/hadoop/trunk/bin/slaves.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/slaves.sh?rev=382465&r1=382464&r2=382465&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/slaves.sh (original)
+++ lucene/hadoop/trunk/bin/slaves.sh Thu Mar  2 10:48:52 2006
@@ -5,6 +5,7 @@
 # Environment Variables
 #
 #   HADOOP_SLAVES    File naming remote hosts.  Default is ~/.slaves
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
 ##
 
 usage="Usage: slaves.sh command..."
@@ -30,16 +31,24 @@
 # the root of the Hadoop installation
 HADOOP_HOME=`dirname "$this"`/..
 
-if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source "${HADOOP_HOME}/conf/hadoop-env.sh"
+# Allow alternate conf dir location.
+HADOOP_CONF_DIR="${HADOOP_CONF_DIR:=$HADOOP_HOME/conf}"
+
+if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
+  source "${HADOOP_CONF_DIR}/hadoop-env.sh"
 fi
 
 if [ "$HADOOP_SLAVES" = "" ]; then
-  export HADOOP_SLAVES="$HADOOP_HOME/conf/slaves"
+  export HADOOP_SLAVES="${HADOOP_CONF_DIR}/slaves"
 fi
 
 +# By default, forward the HADOOP_CONF_DIR environment variable to the
 +# remote slave.  The remote slave must have the following added to its
+# /etc/ssh/sshd_config:
+#   AcceptEnv HADOOP_CONF_DIR
 +# See 'man ssh_config' for more on SendEnv and AcceptEnv.
 if [ "$HADOOP_SSH_OPTS" = "" ]; then
-  export HADOOP_SSH_OPTS="-o ConnectTimeout=1"
+  export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 fi
 
 for slave in `cat "$HADOOP_SLAVES"`; do
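
For the SendEnv forwarding above to work, sshd on each slave must agree to
accept the variable. A minimal sshd_config fragment (standard OpenSSH
configuration, not part of this commit; sshd must be reloaded afterwards):

  # /etc/ssh/sshd_config on each slave
  AcceptEnv HADOOP_CONF_DIR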

Modified: lucene/hadoop/trunk/conf/hadoop-env.sh.template
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/conf/hadoop-env.sh.template?rev=382465&r1=382464&r2=382465&view=diff
==============================================================================
--- lucene/hadoop/trunk/conf/hadoop-env.sh.template (original)
+++ lucene/hadoop/trunk/conf/hadoop-env.sh.template Thu Mar  2 10:48:52 2006
@@ -14,8 +14,8 @@
 # Extra Java runtime options.  Empty by default.
 # export HADOOP_OPTS=-server
 
-# Extra ssh options.  Default is '-o ConnectTimeout=1'.
-# export HADOOP_SSH_OPTS="-o ConnectTimeout=1"
+# Extra ssh options.  Default: '-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR'.
+# export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 
 # Where log files are stored.  $HADOOP_HOME/logs by default.
 # export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
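
Note that slaves.sh only applies the default HADOOP_SSH_OPTS when the
variable is empty, so a site that overrides it in hadoop-env.sh should keep
the SendEnv option or HADOOP_CONF_DIR will no longer reach the slaves. An
illustrative override (the timeout value is arbitrary):

  # hadoop-env.sh: longer ssh timeout while preserving conf dir forwarding
  export HADOOP_SSH_OPTS="-o ConnectTimeout=5 -o SendEnv=HADOOP_CONF_DIR"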