Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/02/28 22:46:35 UTC

svn commit: r381793 - in /lucene/hadoop/trunk/bin: hadoop hadoop-daemon.sh hadoop-daemons.sh slaves.sh start-all.sh stop-all.sh

Author: cutting
Date: Tue Feb 28 13:46:34 2006
New Revision: 381793

URL: http://svn.apache.org/viewcvs?rev=381793&view=rev
Log:
Escape paths so that spaces are permitted (as is common on Windows.)
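
For context: an unquoted shell expansion is word-split, so a path containing a
space (e.g. a Windows-style "C:/Program Files/hadoop") is passed as two
arguments. A minimal sketch of the failure mode and the fix, using a
hypothetical install path (not taken from the commit):

    HADOOP_HOME="/c/Program Files/hadoop"  # hypothetical path containing a space
    cd $HADOOP_HOME                        # breaks: cd receives two words, "/c/Program" and "Files/hadoop"
    cd "$HADOOP_HOME"                      # works: the quoted expansion stays a single word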

Modified:
    lucene/hadoop/trunk/bin/hadoop
    lucene/hadoop/trunk/bin/hadoop-daemon.sh
    lucene/hadoop/trunk/bin/hadoop-daemons.sh
    lucene/hadoop/trunk/bin/slaves.sh
    lucene/hadoop/trunk/bin/start-all.sh
    lucene/hadoop/trunk/bin/stop-all.sh

Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Tue Feb 28 13:46:34 2006
@@ -50,7 +50,7 @@
 HADOOP_HOME=`cd "$THIS_DIR/.." ; pwd`
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 # some Java parameters

Modified: lucene/hadoop/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop-daemon.sh?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemon.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemon.sh Tue Feb 28 13:46:34 2006
@@ -37,16 +37,16 @@
 done
 
 # the root of the Hadoop installation
-HADOOP_HOME=`dirname $this`/..
+HADOOP_HOME=`dirname "$this"`/..
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 # get log directory
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR=$HADOOP_HOME/logs
-  mkdir -p $HADOOP_LOG_DIR
+  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+  mkdir -p "$HADOOP_LOG_DIR"
 fi
 
 if [ "$HADOOP_PID_DIR" = "" ]; then
@@ -77,11 +77,11 @@
       rsync -a --delete --exclude=.svn $HADOOP_MASTER/ $HADOOP_HOME
     fi
 
-    cd $HADOOP_HOME
+    cd "$HADOOP_HOME"
     echo starting $command, logging to $log
-    nohup bin/hadoop $command "$@" >& $log < /dev/null &
+    nohup bin/hadoop $command "$@" >& "$log" < /dev/null &
     echo $! > $pid
-    sleep 1; head $log
+    sleep 1; head "$log"
     ;;
           
   (stop)

Modified: lucene/hadoop/trunk/bin/hadoop-daemons.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop-daemons.sh?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemons.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemons.sh Tue Feb 28 13:46:34 2006
@@ -10,7 +10,7 @@
   exit 1
 fi
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-exec $bin/slaves.sh $bin/hadoop-daemon.sh "$@"
+exec "$bin/slaves.sh" "$bin/hadoop-daemon.sh" "$@"

Modified: lucene/hadoop/trunk/bin/slaves.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/slaves.sh?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/slaves.sh (original)
+++ lucene/hadoop/trunk/bin/slaves.sh Tue Feb 28 13:46:34 2006
@@ -28,18 +28,18 @@
 done
 
 # the root of the Hadoop installation
-HADOOP_HOME=`dirname $this`/..
+HADOOP_HOME=`dirname "$this"`/..
 
 if [ -f "$HADOOP_HOME/conf/hadoop-env.sh" ]; then
-  source ${HADOOP_HOME}/conf/hadoop-env.sh
+  source "${HADOOP_HOME}/conf/hadoop-env.sh"
 fi
 
 if [ "$HADOOP_SLAVES" = "" ]; then
-  export HADOOP_SLAVES=$HADOOP_HOME/conf/slaves
+  export HADOOP_SLAVES="$HADOOP_HOME/conf/slaves"
 fi
 
-for slave in `cat $HADOOP_SLAVES`; do
- ssh -o ConnectTimeout=1 $slave "$@" \
+for slave in `cat "$HADOOP_SLAVES"`; do
+ ssh -o ConnectTimeout=1 $slave $"${@// /\\ }" \
    2>&1 | sed "s/^/$slave: /" &
 done
 

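The slaves.sh hunk above also changes how arguments are forwarded over ssh:
ssh joins its remote-command arguments into one string that the slave's shell
re-splits, so a space inside an argument would otherwise split it apart on the
remote side. The "${@// /\\ }" expansion backslash-escapes each space in every
positional parameter before the ssh call. A minimal sketch of the effect, with
a made-up host and arguments (not taken from the commit):

    set -- start "/opt/hadoop dir/bin/hadoop-daemon.sh"  # hypothetical argument containing a space
    echo ssh somehost "${@// /\\ }"
    # prints: ssh somehost start /opt/hadoop\ dir/bin/hadoop-daemon.sh
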
Modified: lucene/hadoop/trunk/bin/start-all.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/start-all.sh?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/start-all.sh (original)
+++ lucene/hadoop/trunk/bin/start-all.sh Tue Feb 28 13:46:34 2006
@@ -2,10 +2,10 @@
 
 # Start all hadoop daemons.  Run this on master node.
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-$bin/hadoop-daemons.sh start datanode
-$bin/hadoop-daemon.sh start namenode
-$bin/hadoop-daemon.sh start jobtracker
-$bin/hadoop-daemons.sh start tasktracker
+"$bin"/hadoop-daemons.sh start datanode
+"$bin"/hadoop-daemon.sh start namenode
+"$bin"/hadoop-daemon.sh start jobtracker
+"$bin"/hadoop-daemons.sh start tasktracker

Modified: lucene/hadoop/trunk/bin/stop-all.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/stop-all.sh?rev=381793&r1=381792&r2=381793&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/stop-all.sh (original)
+++ lucene/hadoop/trunk/bin/stop-all.sh Tue Feb 28 13:46:34 2006
@@ -2,10 +2,10 @@
 
 # Stop all hadoop daemons.  Run this on master node.
 
-bin=`dirname $0`
-bin=`cd $bin; pwd`
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
 
-$bin/hadoop-daemon.sh stop jobtracker
-$bin/hadoop-daemons.sh stop tasktracker
-$bin/hadoop-daemon.sh stop namenode
-$bin/hadoop-daemons.sh stop datanode
+"$bin"/hadoop-daemon.sh stop jobtracker
+"$bin"/hadoop-daemons.sh stop tasktracker
+"$bin"/hadoop-daemon.sh stop namenode
+"$bin"/hadoop-daemons.sh stop datanode