Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/05/27 18:35:04 UTC

svn commit: r1128385 [1/2] - in /hadoop/common/trunk: ./ bin/ conf/ ivy/ src/docs/cn/src/documentation/content/xdocs/ src/docs/src/documentation/content/xdocs/ src/native/ src/native/lib/ src/native/src/org/apache/hadoop/io/compress/zlib/ src/packages/...

Author: omalley
Date: Fri May 27 16:35:02 2011
New Revision: 1128385

URL: http://svn.apache.org/viewvc?rev=1128385&view=rev
Log:
HADOOP-6255. Create RPM and Debian packages for common. Changes deployment
layout to be consistent across the binary tgz, rpm, and deb. Adds setup
scripts for easy one node cluster configuration and user creation.
(Eric Yang via omalley)

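In rough outline, the deployment layout this change settles on (identical for the binary tgz and the rpm/deb packages; paths follow the bin-package target and hadoop-config.sh below, with $PREFIX standing in for the installation prefix):

    $PREFIX/bin/hadoop                    user-facing command
    $PREFIX/libexec/hadoop-config.sh      sourced by the other scripts as ../libexec/hadoop-config.sh
    $PREFIX/sbin/                         hadoop-daemon.sh, hadoop-daemons.sh, slaves.sh, start/stop-all.sh, setup scripts
    $PREFIX/etc/hadoop/                   configuration (hadoop-config.sh falls back to conf/ if that still exists)
    $PREFIX/share/hadoop/common/          hadoop jars, with dependencies under share/hadoop/common/lib
    $PREFIX/var/log, $PREFIX/var/run      default log and pid locations per the new hadoop-env.sh template
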
Added:
    hadoop/common/trunk/src/docs/src/documentation/content/xdocs/deployment_layout.xml
    hadoop/common/trunk/src/packages/
    hadoop/common/trunk/src/packages/deb/
    hadoop/common/trunk/src/packages/deb/hadoop.control/
    hadoop/common/trunk/src/packages/deb/hadoop.control/conffile
    hadoop/common/trunk/src/packages/deb/hadoop.control/control
    hadoop/common/trunk/src/packages/deb/hadoop.control/postinst
    hadoop/common/trunk/src/packages/deb/hadoop.control/postrm
    hadoop/common/trunk/src/packages/deb/hadoop.control/preinst
    hadoop/common/trunk/src/packages/deb/hadoop.control/prerm
    hadoop/common/trunk/src/packages/deb/init.d/
    hadoop/common/trunk/src/packages/deb/init.d/hadoop-datanode
    hadoop/common/trunk/src/packages/deb/init.d/hadoop-jobtracker
    hadoop/common/trunk/src/packages/deb/init.d/hadoop-namenode
    hadoop/common/trunk/src/packages/deb/init.d/hadoop-tasktracker
    hadoop/common/trunk/src/packages/hadoop-create-user.sh
    hadoop/common/trunk/src/packages/hadoop-setup-conf.sh
    hadoop/common/trunk/src/packages/hadoop-setup-hdfs.sh
    hadoop/common/trunk/src/packages/hadoop-setup-single-node.sh
    hadoop/common/trunk/src/packages/rpm/
    hadoop/common/trunk/src/packages/rpm/init.d/
    hadoop/common/trunk/src/packages/rpm/init.d/hadoop-datanode
    hadoop/common/trunk/src/packages/rpm/init.d/hadoop-jobtracker
    hadoop/common/trunk/src/packages/rpm/init.d/hadoop-namenode
    hadoop/common/trunk/src/packages/rpm/init.d/hadoop-tasktracker
    hadoop/common/trunk/src/packages/rpm/spec/
    hadoop/common/trunk/src/packages/rpm/spec/hadoop.spec
    hadoop/common/trunk/src/packages/templates/
    hadoop/common/trunk/src/packages/templates/conf/
    hadoop/common/trunk/src/packages/templates/conf/core-site.xml
    hadoop/common/trunk/src/packages/update-hadoop-env.sh
Modified:
    hadoop/common/trunk/CHANGES.txt
    hadoop/common/trunk/bin/hadoop
    hadoop/common/trunk/bin/hadoop-config.sh
    hadoop/common/trunk/bin/hadoop-daemon.sh
    hadoop/common/trunk/bin/hadoop-daemons.sh
    hadoop/common/trunk/bin/rcc
    hadoop/common/trunk/bin/slaves.sh
    hadoop/common/trunk/bin/start-all.sh
    hadoop/common/trunk/bin/stop-all.sh
    hadoop/common/trunk/build.xml
    hadoop/common/trunk/conf/hadoop-env.sh.template
    hadoop/common/trunk/ivy.xml
    hadoop/common/trunk/ivy/libraries.properties
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/commands_manual.xml
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/hod_admin_guide.xml
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/quickstart.xml
    hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/streaming.xml
    hadoop/common/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/common/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml
    hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml
    hadoop/common/trunk/src/docs/src/documentation/content/xdocs/site.xml
    hadoop/common/trunk/src/native/Makefile.am
    hadoop/common/trunk/src/native/lib/Makefile.am
    hadoop/common/trunk/src/native/packageNativeHadoop.sh
    hadoop/common/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
    hadoop/common/trunk/src/test/system/c++/runAs/configure
    hadoop/common/trunk/src/test/system/c++/runAs/configure.ac
    hadoop/common/trunk/src/test/system/c++/runAs/runAs.c
    hadoop/common/trunk/src/test/system/c++/runAs/runAs.h.in
    hadoop/common/trunk/src/test/system/java/org/apache/hadoop/test/system/process/HadoopDaemonRemoteCluster.java

Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Fri May 27 16:35:02 2011
@@ -2402,6 +2402,15 @@ Release 0.20.3 - Unreleased
 
     HADOOP-7072. Remove java5 dependencies from build. (cos)
 
+Release 0.20.204.0 - Unreleased
+
+  NEW FEATURES
+
+    HADOOP-6255. Create RPM and Debian packages for common. Changes deployment
+    layout to be consistent across the binary tgz, rpm, and deb. Adds setup
+    scripts for easy one node cluster configuration and user creation.
+    (Eric Yang via omalley)
+
 Release 0.20.203.0 - 2011-5-11
 
   BUG FIXES

Modified: hadoop/common/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop (original)
+++ hadoop/common/trunk/bin/hadoop Fri May 27 16:35:02 2011
@@ -17,10 +17,11 @@
 
 # This script runs the hadoop core commands. 
 
-bin=`dirname "$0"`
+bin=`which $0`
+bin=`dirname ${bin}`
 bin=`cd "$bin"; pwd`
  
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 function print_usage(){
   echo "Usage: hadoop [--config confdir] COMMAND"
@@ -54,8 +55,8 @@ case $COMMAND in
     #try to locate hdfs and if present, delegate to it.  
     if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
       exec "${HADOOP_HDFS_HOME}"/bin/hdfs $*
-    elif [ -f "${HADOOP_HOME}"/bin/hdfs ]; then
-      exec "${HADOOP_HOME}"/bin/hdfs $*
+    elif [ -f "${HADOOP_PREFIX}"/bin/hdfs ]; then
+      exec "${HADOOP_PREFIX}"/bin/hdfs $*
     else
       echo "HDFS not found."
       exit
@@ -70,8 +71,8 @@ case $COMMAND in
     #try to locate mapred and if present, delegate to it.
     if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
       exec "${HADOOP_MAPRED_HOME}"/bin/mapred $*
-    elif [ -f "${HADOOP_HOME}"/bin/mapred ]; then
-      exec "${HADOOP_HOME}"/bin/mapred $* 
+    elif [ -f "${HADOOP_PREFIX}"/bin/mapred ]; then
+      exec "${HADOOP_PREFIX}"/bin/mapred $* 
     else
       echo "MAPRED not found."
       exit

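Why bin/hadoop now resolves itself with "which": once the command is installed on the PATH (as the rpm/deb lay it out under the package prefix), $0 can be just the bare name, and dirname alone would not lead back to libexec. A rough sketch of the effect, with /usr as an assumed prefix:

    # invoked as plain "hadoop" with /usr/bin on the PATH
    bin=`which hadoop`                           # -> /usr/bin/hadoop
    bin=`dirname ${bin}`; bin=`cd "$bin"; pwd`   # -> /usr/bin
    . "$bin"/../libexec/hadoop-config.sh         # -> /usr/libexec/hadoop-config.sh
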
Modified: hadoop/common/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop-config.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop-config.sh (original)
+++ hadoop/common/trunk/bin/hadoop-config.sh Fri May 27 16:35:02 2011
@@ -26,11 +26,8 @@ script="$(basename -- "$this")"
 this="$common_bin/$script"
 
 # the root of the Hadoop installation
-#TODO: change the env variable when dir structure is changed
-export HADOOP_HOME=`dirname "$this"`/..
-export HADOOP_COMMON_HOME="${HADOOP_HOME}"
-#export HADOOP_HOME=`dirname "$this"`/../..
-#export HADOOP_COMMON_HOME="${HADOOP_COMMON_HOME:-`dirname "$this"`/..}"
+# See HADOOP-6255 for directory structure layout
+export HADOOP_PREFIX=`dirname "$this"`/..
 
 #check to see if the conf dir is given as an optional argument
 if [ $# -gt 1 ]
@@ -45,7 +42,13 @@ then
 fi
  
 # Allow alternate conf dir location.
-export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_HOME/conf}"
+if [ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]; then
+  DEFAULT_CONF_DIR="conf"
+else
+  DEFAULT_CONF_DIR="etc/hadoop"
+fi
+
+export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}"
 
 # User can specify hostnames or a file where the hostnames are (not both)
 if [[ ( "$HADOOP_SLAVES" != '' ) && ( "$HADOOP_SLAVE_NAMES" != '' ) ]] ; then
@@ -130,54 +133,61 @@ CLASSPATH="${HADOOP_CONF_DIR}"
 CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
 
 # for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_COMMON_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/classes
+if [ -d "$HADOOP_PREFIX/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
 fi
-if [ -d "$HADOOP_COMMON_HOME/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build
+if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
 fi
-if [ -d "$HADOOP_COMMON_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/test/classes
+if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
 fi
-if [ -d "$HADOOP_COMMON_HOME/build/test/core/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/test/core/classes
+if [ -d "$HADOOP_PREFIX/build/test/core/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/core/classes
 fi
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
 # for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_COMMON_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME
+if [ -d "$HADOOP_PREFIX/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
+fi
+
+if [ -d "$HADOOP_PREFIX/share/hadoop/common/lib" ]; then
+  for f in $HADOOP_PREFIX/share/hadoop/common/lib/*.jar; do
+    CLASSPATH=${CLASSPATH}:$f;
+  done
 fi
-for f in $HADOOP_COMMON_HOME/hadoop-*.jar; do
+
+for f in $HADOOP_PREFIX/share/hadoop/common/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-# add libs to CLASSPATH
-for f in $HADOOP_COMMON_HOME/lib/*.jar; do
+# for developers, add libs to CLASSPATH
+for f in $HADOOP_PREFIX/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Common/common" ]; then
-for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Common/common/*.jar; do
+if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common" ]; then
+for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop-Common/common/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 fi
 
-if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Hdfs/common" ]; then
-for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Hdfs/common/*.jar; do
+if [ -d "$HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs" ]; then
+for f in $HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/hdfs/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 fi
 
-if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+if [ -d "$HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred" ]; then
+for f in $HADOOP_PREFIX/build/ivy/lib/Hadoop/mapred/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 fi
 
-for f in $HADOOP_COMMON_HOME/lib/jsp-2.1/*.jar; do
+for f in $HADOOP_PREFIX/lib/jsp-2.1/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
@@ -188,7 +198,7 @@ fi
 
 # default log directory & file
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+  HADOOP_LOG_DIR="$HADOOP_PREFIX/logs"
 fi
 if [ "$HADOOP_LOGFILE" = "" ]; then
   HADOOP_LOGFILE='hadoop.log'
@@ -204,33 +214,37 @@ unset IFS
 
 # cygwin path translation
 if $cygwin; then
-  HADOOP_COMMON_HOME=`cygpath -w "$HADOOP_COMMON_HOME"`
+  HADOOP_PREFIX=`cygpath -w "$HADOOP_PREFIX"`
   HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
   JAVA_LIBRARY_PATH=`cygpath -w "$JAVA_LIBRARY_PATH"`
 fi
 
 # setup 'java.library.path' for native-hadoop code if necessary
 
-if [ -d "${HADOOP_COMMON_HOME}/build/native" -o -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
+if [ -d "${HADOOP_PREFIX}/build/native" -o -d "${HADOOP_PREFIX}/lib/native" ]; then
   JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
   
-  if [ -d "$HADOOP_COMMON_HOME/build/native" ]; then
+  if [ -d "$HADOOP_PREFIX/build/native" ]; then
     if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-        JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_COMMON_HOME}/build/native/${JAVA_PLATFORM}/lib
+        JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_PREFIX}/build/native/${JAVA_PLATFORM}/lib
     else
-        JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/build/native/${JAVA_PLATFORM}/lib
+        JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/build/native/${JAVA_PLATFORM}/lib
     fi
   fi
   
-  if [ -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
+  if [ -d "${HADOOP_PREFIX}/lib/native" ]; then
     if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
-      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
+      JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_PREFIX}/lib/native/${JAVA_PLATFORM}
     else
-      JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
+      JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib/native/${JAVA_PLATFORM}
     fi
   fi
 fi
 
+if [ -e "${HADOOP_PREFIX}/lib/libhadoop.a" ]; then
+  JAVA_LIBRARY_PATH=${HADOOP_PREFIX}/lib
+fi
+
 # cygwin path translation
 if $cygwin; then
   JAVA_LIBRARY_PATH=`cygpath -p "$JAVA_LIBRARY_PATH"`
@@ -238,7 +252,7 @@ fi
 
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_COMMON_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_PREFIX"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
 HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.security.logger=${HADOOP_SECURITY_LOGGER:-INFO,console}"
@@ -252,8 +266,8 @@ HADOOP_OPTS="$HADOOP_OPTS -Djava.net.pre
 
 # put hdfs in classpath if present
 if [ "$HADOOP_HDFS_HOME" = "" ]; then
-  if [ -d "${HADOOP_HOME}/hdfs" ]; then
-    HADOOP_HDFS_HOME=$HADOOP_HOME/hdfs
+  if [ -d "${HADOOP_PREFIX}/share/hadoop/hdfs" ]; then
+    HADOOP_HDFS_HOME=$HADOOP_PREFIX/share/hadoop/hdfs
     #echo Found HDFS installed at $HADOOP_HDFS_HOME
   fi
 fi
@@ -273,9 +287,11 @@ if [ -d "${HADOOP_HDFS_HOME}" ]; then
   done
 
   # add libs to CLASSPATH
-  for f in $HADOOP_HDFS_HOME/lib/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f;
-  done
+  if [ -d "${HADOOP_HDFS_HOME}/lib" ]; then
+    for f in $HADOOP_HDFS_HOME/lib/*.jar; do
+      CLASSPATH=${CLASSPATH}:$f;
+    done
+  fi
   
   if [ -d "$HADOOP_HDFS_HOME/build/classes" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/classes
@@ -289,9 +305,8 @@ fi
 
 # set mapred home if mapred is present
 if [ "$HADOOP_MAPRED_HOME" = "" ]; then
-  if [ -d "${HADOOP_HOME}/mapred" ]; then
-    HADOOP_MAPRED_HOME=$HADOOP_HOME/mapred
-    #echo Found MAPRED installed at $HADOOP_MAPRED_HOME
+  if [ -d "${HADOOP_PREFIX}/share/hadoop/mapreduce" ]; then
+    HADOOP_MAPRED_HOME=$HADOOP_PREFIX/share/hadoop/mapreduce
   fi
 fi
 
@@ -305,13 +320,15 @@ if [ -d "${HADOOP_MAPRED_HOME}" ]; then
     CLASSPATH=${CLASSPATH}:${HADOOP_MAPRED_HOME}/conf
   fi
   
-  for f in $HADOOP_MAPRED_HOME/hadoop-mapred-*.jar; do
+  for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-*.jar; do
     CLASSPATH=${CLASSPATH}:$f
   done
 
-  for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
-    CLASSPATH=${CLASSPATH}:$f
-  done
+  if [ -d "${HADOOP_MAPRED_HOME}/lib" ]; then
+    for f in $HADOOP_MAPRED_HOME/lib/*.jar; do
+      CLASSPATH=${CLASSPATH}:$f
+    done
+  fi
 
   if [ -d "$HADOOP_MAPRED_HOME/build/classes" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/classes
@@ -321,10 +338,10 @@ if [ -d "${HADOOP_MAPRED_HOME}" ]; then
     CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME/build/tools
   fi
 
-  for f in $HADOOP_MAPRED_HOME/hadoop-mapred-tools-*.jar; do
+  for f in $HADOOP_MAPRED_HOME/hadoop-mapreduce-tools-*.jar; do
     TOOL_PATH=${TOOL_PATH}:$f;
   done
-  for f in $HADOOP_MAPRED_HOME/build/hadoop-mapred-tools-*.jar; do
+  for f in $HADOOP_MAPRED_HOME/build/hadoop-mapreduce-tools-*.jar; do
     TOOL_PATH=${TOOL_PATH}:$f;
   done
 fi

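The configuration directory now resolves in two steps, so an unpacked source tree and a packaged install both work without extra flags. A condensed sketch of the logic added above, plus the usual overrides:

    # prefer conf/ when a checkout or old-style tarball still carries it,
    # otherwise fall back to the packaged etc/hadoop location
    if [ -e "${HADOOP_PREFIX}/conf/hadoop-env.sh" ]; then
      DEFAULT_CONF_DIR="conf"
    else
      DEFAULT_CONF_DIR="etc/hadoop"
    fi
    export HADOOP_CONF_DIR="${HADOOP_CONF_DIR:-$HADOOP_PREFIX/$DEFAULT_CONF_DIR}"

    # an explicit location still wins, either via the environment or --config:
    HADOOP_CONF_DIR=/etc/hadoop bin/hadoop fs -ls /
    bin/hadoop --config /etc/hadoop fs -ls /
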
Modified: hadoop/common/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop-daemon.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop-daemon.sh (original)
+++ hadoop/common/trunk/bin/hadoop-daemon.sh Fri May 27 16:35:02 2011
@@ -20,7 +20,7 @@
 #
 # Environment Variables
 #
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
 #   HADOOP_LOG_DIR   Where log files are stored.  PWD by default.
 #   HADOOP_MASTER    host:path where hadoop code should be rsync'd from
 #   HADOOP_PID_DIR   The pid files are stored. /tmp by default.
@@ -39,12 +39,12 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 # get arguments
 
 #default value
-hadoopScript="$HADOOP_HOME"/bin/hadoop
+hadoopScript="$HADOOP_PREFIX"/bin/hadoop
 if [ "--script" = "$1" ]
   then
     shift
@@ -91,7 +91,7 @@ fi
 
 # get log directory
 if [ "$HADOOP_LOG_DIR" = "" ]; then
-  export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
+  export HADOOP_LOG_DIR="$HADOOP_PREFIX/logs"
 fi
 mkdir -p "$HADOOP_LOG_DIR"
 chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR
@@ -127,12 +127,12 @@ case $startStop in
 
     if [ "$HADOOP_MASTER" != "" ]; then
       echo rsync from $HADOOP_MASTER
-      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HADOOP_MASTER/ "$HADOOP_HOME"
+      rsync -a -e ssh --delete --exclude=.svn --exclude='logs/*' --exclude='contrib/hod/logs/*' $HADOOP_MASTER/ "$HADOOP_PREFIX"
     fi
 
     hadoop_rotate_log $log
     echo starting $command, logging to $log
-    cd "$HADOOP_HOME"
+    cd "$HADOOP_PREFIX"
     nohup nice -n $HADOOP_NICENESS $hadoopScript --config $HADOOP_CONF_DIR $command "$@" > "$log" 2>&1 < /dev/null &
     echo $! > $pid
     sleep 1; head "$log"

Modified: hadoop/common/trunk/bin/hadoop-daemons.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop-daemons.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop-daemons.sh (original)
+++ hadoop/common/trunk/bin/hadoop-daemons.sh Fri May 27 16:35:02 2011
@@ -29,6 +29,6 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. $bin/hadoop-config.sh
+. $bin/../libexec/hadoop-config.sh
 
-exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_HOME" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"
+exec "$bin/slaves.sh" --config $HADOOP_CONF_DIR cd "$HADOOP_PREFIX" \; "$bin/hadoop-daemon.sh" --config $HADOOP_CONF_DIR "$@"

Modified: hadoop/common/trunk/bin/rcc
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/rcc?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/rcc (original)
+++ hadoop/common/trunk/bin/rcc Fri May 27 16:35:02 2011
@@ -24,13 +24,13 @@
 #
 #   HADOOP_OPTS      Extra Java runtime options.
 #
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
 #
 
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"
@@ -55,33 +55,33 @@ CLASSPATH="${HADOOP_CONF_DIR}"
 CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
 
 # for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_HOME/build/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/classes
+if [ -d "$HADOOP_PREFIX/build/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/classes
 fi
-if [ -d "$HADOOP_HOME/build/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build
+if [ -d "$HADOOP_PREFIX/build/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build
 fi
-if [ -d "$HADOOP_HOME/build/test/classes" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME/build/test/classes
+if [ -d "$HADOOP_PREFIX/build/test/classes" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/build/test/classes
 fi
 
 # so that filenames w/ spaces are handled correctly in loops below
 IFS=
 
 # for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_HOME/webapps" ]; then
-  CLASSPATH=${CLASSPATH}:$HADOOP_HOME
+if [ -d "$HADOOP_PREFIX/webapps" ]; then
+  CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX
 fi
-for f in $HADOOP_HOME/hadoop-*.jar; do
+for f in $HADOOP_PREFIX/hadoop-*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
 # add libs to CLASSPATH
-for f in $HADOOP_HOME/lib/*.jar; do
+for f in $HADOOP_PREFIX/lib/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 
-for f in $HADOOP_HOME/lib/jetty-ext/*.jar; do
+for f in $HADOOP_PREFIX/lib/jetty-ext/*.jar; do
   CLASSPATH=${CLASSPATH}:$f;
 done
 

Modified: hadoop/common/trunk/bin/slaves.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/slaves.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/slaves.sh (original)
+++ hadoop/common/trunk/bin/slaves.sh Fri May 27 16:35:02 2011
@@ -22,7 +22,7 @@
 #
 #   HADOOP_SLAVES    File naming remote hosts.
 #     Default is ${HADOOP_CONF_DIR}/slaves.
-#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_HOME}/conf.
+#   HADOOP_CONF_DIR  Alternate conf dir. Default is ${HADOOP_PREFIX}/conf.
 #   HADOOP_SLAVE_SLEEP Seconds to sleep between spawning remote commands.
 #   HADOOP_SSH_OPTS Options passed to ssh when running remote commands.
 ##
@@ -38,7 +38,7 @@ fi
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 if [ -f "${HADOOP_CONF_DIR}/hadoop-env.sh" ]; then
   . "${HADOOP_CONF_DIR}/hadoop-env.sh"

Modified: hadoop/common/trunk/bin/start-all.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/start-all.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/start-all.sh (original)
+++ hadoop/common/trunk/bin/start-all.sh Fri May 27 16:35:02 2011
@@ -23,7 +23,7 @@ echo "This script is Deprecated. Instead
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 # start hdfs daemons if hdfs is present
 if [ -f "${HADOOP_HDFS_HOME}"/bin/start-dfs.sh ]; then

Modified: hadoop/common/trunk/bin/stop-all.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/stop-all.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/bin/stop-all.sh (original)
+++ hadoop/common/trunk/bin/stop-all.sh Fri May 27 16:35:02 2011
@@ -23,7 +23,7 @@ echo "This script is Deprecated. Instead
 bin=`dirname "${BASH_SOURCE-$0}"`
 bin=`cd "$bin"; pwd`
 
-. "$bin"/hadoop-config.sh
+. "$bin"/../libexec/hadoop-config.sh
 
 # stop hdfs daemons if hdfs is present
 if [ -f "${HADOOP_HDFS_HOME}"/bin/stop-dfs.sh ]; then

Modified: hadoop/common/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/build.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/build.xml (original)
+++ hadoop/common/trunk/build.xml Fri May 27 16:35:02 2011
@@ -26,12 +26,15 @@
   <property file="${user.home}/build.properties" />
   <property file="${basedir}/build.properties" />
  
-  <property name="Name" value="Hadoop-common"/>
-  <property name="name" value="hadoop-common"/>
-  <property name="version" value="0.23.0-SNAPSHOT"/>
+  <property name="module" value="common"/>
+  <property name="Name" value="Hadoop-${module}"/>
+  <property name="name" value="hadoop-${module}"/>
+  <property name="_version" value="0.23.0"/>
+  <property name="version" value="${_version}-SNAPSHOT"/>
   <property name="final.name" value="${name}-${version}"/>
   <property name="test.final.name" value="${name}-test-${version}"/>
   <property name="year" value="2009"/>
+  <property name="package.release" value="1"/>
 
   <property name="src.dir" value="${basedir}/src"/>  	
   <property name="java.src.dir" value="${src.dir}/java"/>
@@ -196,7 +199,18 @@
      <equals arg1="${repo}" arg2="staging"/>
   </condition>
 
-<!-- the normal classpath -->
+  <!-- packaging properties -->
+  <property name="package.prefix" value="/usr"/>
+  <property name="package.conf.dir" value="/etc/hadoop"/>
+  <property name="package.log.dir" value="/var/log/hadoop"/>
+  <property name="package.pid.dir" value="/var/run/hadoop"/>
+  <property name="package.var.dir" value="/var/lib/hadoop"/>
+  <property name="package.share.dir" value="/share/hadoop/${module}"/>
+  <!-- Use fixed path to build rpm for avoiding rpmbuild conflict with dash path names -->
+  <property name="package.buildroot" value="/tmp/hadoop_package_build_${user.name}"/>
+  <property name="package.build.dir" value="/tmp/hadoop_package_build_${user.name}/BUILD"/>
+
+  <!-- the normal classpath -->
   <path id="classpath">
     <pathelement location="${build.classes}"/>
     <pathelement location="${conf.dir}"/>
@@ -1073,6 +1087,7 @@
 	  description="Build distribution">
     <mkdir dir="${dist.dir}"/>
     <mkdir dir="${dist.dir}/lib"/>
+    <mkdir dir="${dist.dir}/libexec"/>
     <mkdir dir="${dist.dir}/bin"/>
     <mkdir dir="${dist.dir}/docs"/>
     <mkdir dir="${dist.dir}/docs/api"/>
@@ -1122,6 +1137,12 @@
       <fileset dir="ivy"/>
     </copy>
 
+    <copy todir="${dist.dir}/libexec">
+      <fileset dir="bin">
+        <include name="hadoop-config.sh"/>
+      </fileset>
+    </copy>
+
     <copy todir="${dist.dir}">
       <fileset dir=".">
         <include name="*.txt" />
@@ -1160,17 +1181,24 @@
     </macro_tar>
   </target>
 
-  <target name="bin-package" depends="compile, jar, jar-test" 
+  <target name="bin-package" depends="compile, jar, jar-test, javadoc" 
 		description="assembles artifacts for binary target">
     <mkdir dir="${dist.dir}"/>
     <mkdir dir="${dist.dir}/lib"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/contrib"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/templates"/>
+    <mkdir dir="${dist.dir}/${package.share.dir}/webapps"/>
     <mkdir dir="${dist.dir}/bin"/>
+    <mkdir dir="${dist.dir}/libexec"/>
+    <mkdir dir="${dist.dir}/sbin"/>
+    <mkdir dir="${dist.dir}/var/log"/>
+    <mkdir dir="${dist.dir}/var/run"/>
 
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false" flatten="true">
+    <copy todir="${dist.dir}/${package.share.dir}/lib" includeEmptyDirs="false" flatten="true">
       <fileset dir="${common.ivy.lib.dir}"/>
     </copy>
 
-    <copy todir="${dist.dir}/lib" includeEmptyDirs="false">
+    <copy todir="${dist.dir}/${package.share.dir}" includeEmptyDirs="false">
       <fileset dir="lib">
         <exclude name="**/native/**"/>
       </fileset>
@@ -1179,38 +1207,57 @@
   	<exec dir="${dist.dir}" executable="sh" failonerror="true">
 	  <env key="BASE_NATIVE_LIB_DIR" value="${lib.dir}/native"/>
 	  <env key="BUILD_NATIVE_DIR" value="${build.dir}/native"/>
-	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib/native"/>
+	  <env key="DIST_LIB_DIR" value="${dist.dir}/lib"/>
 	  <arg line="${native.src.dir}/packageNativeHadoop.sh"/>
     </exec>
 
-    <copy todir="${dist.dir}"> 
-      <fileset file="${build.dir}/${final.name}.jar"/>
+    <copy todir="${dist.dir}/${package.share.dir}"> 
+      <fileset file="${build.dir}/*.jar"/>
     </copy>
     
     <copy todir="${dist.dir}/bin">
-      <fileset dir="bin"/>
+      <fileset dir="bin">
+        <include name="hadoop"/>
+      </fileset>
     </copy>
 
-    <copy todir="${dist.dir}/conf">
-      <fileset dir="${conf.dir}" excludes="**/*.template"/>
+    <copy todir="${dist.dir}/libexec">
+      <fileset dir="bin">
+        <include name="hadoop-config.sh"/>
+      </fileset>
     </copy>
 
-    <copy file="ivy.xml" tofile="${dist.dir}/ivy.xml"/>
+    <copy todir="${dist.dir}/sbin">
+      <fileset dir="bin">
+        <include name="hadoop-daemon.sh"/>
+        <include name="hadoop-daemons.sh"/>
+        <include name="slaves.sh"/>
+        <include name="start-all.sh"/>
+        <include name="stop-all.sh"/>
+      </fileset>
+      <fileset dir="${basedir}/src/packages">
+        <include name="*.sh" />
+      </fileset>
+    </copy>
 
-    <copy todir="${dist.dir}/ivy">
-      <fileset dir="ivy"/>
+    <copy todir="${dist.dir}/etc/hadoop">
+      <fileset dir="${conf.dir}" excludes="**/*.template"/>
+      <fileset dir="${conf.dir}" includes="hadoop-env.sh.template"/>
     </copy>
 
-    <copy todir="${dist.dir}">
+    <copy todir="${dist.dir}/${package.share.dir}/templates">
+      <fileset dir="${basedir}/src/packages/templates/conf" includes="*"/>
+    </copy>
+
+    <copy todir="${dist.dir}/share/doc/hadoop/${module}">
       <fileset dir=".">
         <include name="*.txt" />
       </fileset>
     </copy>
 
-    <copy todir="${dist.dir}/" file="build.xml"/>
-
     <chmod perm="ugo+x" type="file" parallel="false">
         <fileset dir="${dist.dir}/bin"/>
+        <fileset dir="${dist.dir}/sbin"/>
     </chmod>
   </target>
 
@@ -1219,16 +1266,123 @@
       <param.listofitems>
         <tarfileset dir="${build.dir}" mode="664">
           <exclude name="${final.name}/bin/*" />
+          <exclude name="${final.name}/libexec/*" />
+          <exclude name="${final.name}/sbin/*" />
           <exclude name="${final.name}/src/**" />
           <exclude name="${final.name}/docs/**" />
           <include name="${final.name}/**" />
         </tarfileset>
         <tarfileset dir="${build.dir}" mode="755">
           <include name="${final.name}/bin/*" />
+          <include name="${final.name}/libexec/*" />
+          <include name="${final.name}/sbin/*" />
         </tarfileset>
       </param.listofitems>
     </macro_tar>
   </target>
+
+  <target name="rpm" depends="binary" description="Make rpm package">
+    <mkdir dir="${package.buildroot}/BUILD" />
+    <mkdir dir="${package.buildroot}/RPMS" />
+    <mkdir dir="${package.buildroot}/SRPMS" />
+    <mkdir dir="${package.buildroot}/SOURCES" />
+    <mkdir dir="${package.buildroot}/SPECS" />
+    <copy todir="${package.buildroot}/SOURCES">
+      <fileset dir="${build.dir}">
+        <include name="${final.name}-bin.tar.gz" />
+      </fileset>
+    </copy>
+    <copy file="${src.dir}/packages/rpm/spec/hadoop.spec" todir="${package.buildroot}/SPECS">
+      <filterchain>
+        <replacetokens>
+          <token key="final.name" value="${final.name}" />
+          <token key="version" value="${_version}" />
+          <token key="package.release" value="${package.release}" />
+          <token key="package.build.dir" value="${package.build.dir}" />
+          <token key="package.prefix" value="${package.prefix}" />
+          <token key="package.conf.dir" value="${package.conf.dir}" />
+          <token key="package.log.dir" value="${package.log.dir}" />
+          <token key="package.pid.dir" value="${package.pid.dir}" />
+          <token key="package.var.dir" value="${package.var.dir}" />
+        </replacetokens>
+      </filterchain>
+    </copy>
+    <rpm specFile="hadoop.spec" command="-bb --target ${os.arch}" topDir="${package.buildroot}" cleanBuildDir="true" failOnError="true"/>
+    <copy todir="${build.dir}/" flatten="true">
+      <fileset dir="${package.buildroot}/RPMS">
+        <include name="**/*.rpm" />
+      </fileset>
+    </copy>
+    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
+  </target>
+
+  <target name="deb" depends="ivy-retrieve-package, binary" description="Make deb package">
+    <taskdef name="deb"
+           classname="org.vafer.jdeb.ant.DebAntTask">
+      <classpath refid="ivy-package.classpath" />
+    </taskdef>
+
+    <mkdir dir="${package.build.dir}/hadoop.control" />
+    <mkdir dir="${package.buildroot}/${package.prefix}" />
+    <copy todir="${package.buildroot}/${package.prefix}">
+      <fileset dir="${build.dir}/${final.name}">
+        <include name="**" />
+      </fileset>
+    </copy>
+    <copy todir="${package.build.dir}/hadoop.control">
+      <fileset dir="${src.dir}/packages/deb/hadoop.control">
+        <exclude name="control" />
+      </fileset>
+    </copy>
+    <copy file="${src.dir}/packages/deb/hadoop.control/control" todir="${package.build.dir}/hadoop.control">
+      <filterchain>
+        <replacetokens>
+          <token key="final.name" value="${final.name}" />
+          <token key="version" value="${_version}" />
+          <token key="package.release" value="${package.release}" />
+          <token key="package.build.dir" value="${package.build.dir}" />
+          <token key="package.prefix" value="${package.prefix}" />
+          <token key="package.conf.dir" value="${package.conf.dir}" />
+          <token key="package.log.dir" value="${package.log.dir}" />
+          <token key="package.pid.dir" value="${package.pid.dir}" />
+        </replacetokens>
+      </filterchain>
+    </copy>
+    <deb destfile="${package.buildroot}/${name}_${_version}-${package.release}_${os.arch}.deb" control="${package.build.dir}/hadoop.control">
+      <tarfileset dir="${build.dir}/${final.name}" filemode="644" prefix="${package.prefix}">
+        <exclude name="bin" />
+        <exclude name="etc" />
+        <exclude name="libexec" />
+        <exclude name="etc/**" />
+        <exclude name="sbin" />
+        <include name="**" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}/bin" filemode="755" prefix="${package.prefix}/bin">
+        <include name="*" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}/libexec" filemode="755" prefix="${package.prefix}/libexec">
+        <include name="*" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}/sbin" filemode="755" prefix="${package.prefix}/sbin">
+        <include name="*" />
+      </tarfileset>
+      <tarfileset dir="${src.dir}/packages" filemode="755" prefix="${package.prefix}/sbin">
+        <include name="*.sh" />
+      </tarfileset>
+      <tarfileset dir="${build.dir}/${final.name}/etc/hadoop" filemode="644" prefix="${package.conf.dir}">
+        <exclude name="core-site.xml" />
+        <exclude name="hdfs-site.xml" />
+        <exclude name="mapred-site.xml" />
+        <include name="**" /> 
+      </tarfileset>
+    </deb>
+    <copy todir="${build.dir}/" flatten="true">
+      <fileset dir="${package.buildroot}">
+        <include name="**/hadoop*.deb" />
+      </fileset>
+    </copy>
+    <delete dir="${package.buildroot}" quiet="true" verbose="false"/>
+  </target>
   
   <target name="ant-task-download" description="To download mvn-ant-task" unless="offline">
     <get src="${ant_task_repo_url}" dest="${ant_task.jar}" usetimestamp="true"/>
@@ -1420,6 +1574,7 @@
   <!-- ================================================================== -->
   <target name="clean" depends="clean-sign, clean-fi" description="Clean.  Delete the build files, and their directories">
     <delete dir="${build.dir}"/>
+    <delete dir="${package.buildroot}"/>
     <delete file="${basedir}/ivy/hadoop-common.xml"/>
     <delete file="${basedir}/ivy/hadoop-common-pom.xml"/>
     <delete file="${basedir}/ivy/hadoop-common-test.xml"/>
@@ -1653,6 +1808,11 @@
     	log="${ivyresolvelog}"/>
   </target>
 
+  <target name="ivy-resolve-package" depends="ivy-init">
+    <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="package"
+    	log="${ivyresolvelog}"/>
+  </target>
+
   <target name="ivy-resolve-jdiff" depends="ivy-init">
     <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="jdiff"
     	log="${ivyresolvelog}"/>
@@ -1710,6 +1870,14 @@
     <ivy:cachepath pathid="ivy-common.classpath" conf="common"/>
   </target>
 
+  <target name="ivy-retrieve-package" depends="ivy-resolve-package"
+    description="Retrieve Ivy-managed artifacts for the package configurations">
+    <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+      pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+    		log="${ivyretrievelog}"/>
+    <ivy:cachepath pathid="ivy-package.classpath" conf="package"/>
+  </target>
+
   <target name="ivy-retrieve-releaseaudit" depends="ivy-resolve-releaseaudit"
     description="Retrieve Ivy-managed artifacts for the compile configurations">
     <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"

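The new packaging targets sit on top of the existing binary target, so building the installable artifacts is a one-liner each (assuming rpmbuild is present for the rpm target; jdeb for the deb target is fetched through the new ivy "package" configuration):

    ant rpm    # builds the binary layout, runs rpmbuild against src/packages/rpm/spec/hadoop.spec,
               # and copies the resulting *.rpm back under build/
    ant deb    # builds the binary layout and produces hadoop-common_<version>-<release>_<arch>.deb under build/

    # the packaging paths are ordinary Ant properties and can be overridden, e.g.
    ant rpm -Dpackage.prefix=/usr -Dpackage.log.dir=/var/log/hadoop -Dpackage.pid.dir=/var/run/hadoop
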
Modified: hadoop/common/trunk/conf/hadoop-env.sh.template
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/conf/hadoop-env.sh.template?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/conf/hadoop-env.sh.template (original)
+++ hadoop/common/trunk/conf/hadoop-env.sh.template Fri May 27 16:35:02 2011
@@ -6,7 +6,14 @@
 # remote nodes.
 
 # The java implementation to use.  Required.
-# export JAVA_HOME=/usr/lib/j2sdk1.6-sun
+export JAVA_HOME=${JAVA_HOME}
+
+# Hadoop Installation Prefix
+HADOOP_PREFIX=${HADOOP_PREFIX}
+
+# Hadoop Configuration Directory
+HADOOP_CONF_DIR=${HADOOP_CONF_DIR}
+export HADOOP_CONF_DIR=${HADOOP_CONF_DIR:-$HADOOP_PREFIX/conf}
 
 # Extra Java CLASSPATH elements.  Optional.
 # export HADOOP_CLASSPATH="<extra_entries>:$HADOOP_CLASSPATH"
@@ -30,11 +37,8 @@ export HADOOP_TASKTRACKER_OPTS="-Dcom.su
 # Extra ssh options.  Empty by default.
 # export HADOOP_SSH_OPTS="-o ConnectTimeout=1 -o SendEnv=HADOOP_CONF_DIR"
 
-# Where log files are stored.  $HADOOP_HOME/logs by default.
-# export HADOOP_LOG_DIR=${HADOOP_HOME}/logs
-
-# File naming remote slave hosts.  $HADOOP_HOME/conf/slaves by default.
-# export HADOOP_SLAVES=${HADOOP_HOME}/conf/slaves
+# File naming remote slave hosts.  $HADOOP_PREFIX/conf/slaves by default.
+export HADOOP_SLAVES=${HADOOP_CONF_DIR}/slaves
 
 # host:path where hadoop code should be rsync'd from.  Unset by default.
 # export HADOOP_MASTER=master:/home/$USER/src/hadoop
@@ -45,13 +49,18 @@ export HADOOP_TASKTRACKER_OPTS="-Dcom.su
 # export HADOOP_SLAVE_SLEEP=0.1
 
 # The directory where pid files are stored. /tmp by default.
-# export HADOOP_PID_DIR=/var/hadoop/pids
+HADOOP_PID_DIR=${HADOOP_PID_DIR}
+export HADOOP_PID_DIR=${HADOOP_PID_DIR:-$HADOOP_PREFIX/var/run}
 
 # A string representing this instance of hadoop. $USER by default.
-# export HADOOP_IDENT_STRING=$USER
+export HADOOP_IDENT_STRING=`whoami`
 
 # The scheduling priority for daemon processes.  See 'man nice'.
 # export HADOOP_NICENESS=10
 
 # Allow Hadoop to run with sysctl net.ipv6.bindv6only = 1
 # export HADOOP_ALLOW_IPV6=yes
+
+# Where log files are stored.  $HADOOP_PREFIX/logs by default.
+HADOOP_LOG_DIR=${HADOOP_LOG_DIR}/$HADOOP_IDENT_STRING
+export HADOOP_LOG_DIR=${HADOOP_LOG_DIR:-$HADOOP_PREFIX/var/log}

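With the template now exporting its defaults instead of leaving everything commented out, a packaged install is tuned by editing the copy in the configuration directory. A hypothetical /etc/hadoop/hadoop-env.sh fragment (values are illustrative, though the log and pid paths match the packaging defaults in build.xml):

    export JAVA_HOME=/usr/java/default       # required; site-specific
    export HADOOP_LOG_DIR=/var/log/hadoop
    export HADOOP_PID_DIR=/var/run/hadoop
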
Modified: hadoop/common/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/ivy.xml (original)
+++ hadoop/common/trunk/ivy.xml Fri May 27 16:35:02 2011
@@ -55,6 +55,9 @@
     <!--Testing pulls in everything-->
    <conf name="test" extends="master" description="the classpath needed to run tests"/>
 
+    <!--Packaging pulls in everything-->
+   <conf name="package" extends="master" description="the classpath needed for packaging"/>
+
     <!--Private configurations. -->
 
     <conf name="javadoc" visibility="private" description="artiracts required while performing doc generation"
@@ -301,6 +304,13 @@
       rev="${jsch.version}"
       conf="common->default">
     </dependency>
+    <!--Configuration: package -->
+    <!--artifacts needed for packaging -->
+    <dependency org="org.vafer" 
+      name="jdeb" 
+      rev="${jdeb.version}"
+      conf="package->master">
+    </dependency>
     <dependency org="commons-configuration"
       name="commons-configuration"
       rev="${commons-configuration.version}"

Modified: hadoop/common/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/libraries.properties?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/ivy/libraries.properties (original)
+++ hadoop/common/trunk/ivy/libraries.properties Fri May 27 16:35:02 2011
@@ -48,6 +48,7 @@ hsqldb.version=1.8.0.10
 ivy.version=2.1.0
 
 jasper.version=5.5.12
+jdeb.version=0.8
 jsp.version=2.1
 jsp-api.version=5.5.12
 jets3t.version=0.7.1

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/cluster_setup.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/cluster_setup.xml Fri May 27 16:35:02 2011
@@ -55,7 +55,7 @@
       <p>通常,集群里的一台机器被指定为 
 	 <code>NameNode</code>,另一台不同的机器被指定为<code>JobTracker</code>。这些机器是<em>masters</em>。余下的机器即作为<code>DataNode</code><em>也</em>作为<code>TaskTracker</code>。这些机器是<em>slaves</em>。</p>
       
-      <p>我们用<code>HADOOP_HOME</code>指代安装的根路径。通常,集群里的所有机器的<code>HADOOP_HOME</code>路径相同。</p>
+      <p>我们用<code>HADOOP_PREFIX</code>指代安装的根路径。通常,集群里的所有机器的<code>HADOOP_PREFIX</code>路径相同。</p>
     </section>
     
     <section>
@@ -335,7 +335,7 @@
         </section>
       </section>
 
-      <p>一但全部必要的配置完成,将这些文件分发到所有机器的<code>HADOOP_CONF_DIR</code>路径下,通常是<code>${HADOOP_HOME}/conf</code>。</p>
+      <p>一但全部必要的配置完成,将这些文件分发到所有机器的<code>HADOOP_CONF_DIR</code>路径下,通常是<code>${HADOOP_PREFIX}/conf</code>。</p>
     </section>
     
     <section>

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/commands_manual.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/commands_manual.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/commands_manual.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/commands_manual.xml Fri May 27 16:35:02 2011
@@ -39,7 +39,7 @@
 			
 			           <tr>
 			          	<td><code>--config confdir</code></td>
-			            <td>覆盖缺省配置目录。缺省是${HADOOP_HOME}/conf。</td>
+			            <td>覆盖缺省配置目录。缺省是${HADOOP_PREFIX}/conf。</td>
 			           </tr>
 			           <tr>
 			          	<td><code>GENERIC_OPTIONS</code></td>

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/hod_admin_guide.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/hod_admin_guide.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/hod_admin_guide.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/hod_admin_guide.xml Fri May 27 16:35:02 2011
@@ -128,7 +128,7 @@
   <ul>
    <li>${JAVA_HOME}:Hadoop的Java的安装位置。Hadoop支持Sun JDK 1.5.x及以上版本。</li>
    <li>${CLUSTER_NAME}:集群名称,由'node property'指定,在资源管理器配置中曾提到过。</li>
-   <li>${HADOOP_HOME}:Hadoop在计算节点和提交节点上的安装位置。</li>
+   <li>${HADOOP_PREFIX}:Hadoop在计算节点和提交节点上的安装位置。</li>
    <li>${RM_QUEUE}:在资源管理器配置中设置的作业提交队列。</li>
    <li>${RM_HOME}:资源管理器在计算节点和提交节点的安装位置。</li>
     </ul>

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/mapred_tutorial.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/mapred_tutorial.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/mapred_tutorial.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/mapred_tutorial.xml Fri May 27 16:35:02 2011
@@ -511,11 +511,11 @@
       <section>
         <title>用法</title>
         
-        <p>假设环境变量<code>HADOOP_HOME</code>对应安装时的根目录,<code>HADOOP_VERSION</code>对应Hadoop的当前安装版本,编译<code>WordCount.java</code>来创建jar包,可如下操作:</p>
+        <p>假设环境变量<code>HADOOP_PREFIX</code>对应安装时的根目录,<code>HADOOP_VERSION</code>对应Hadoop的当前安装版本,编译<code>WordCount.java</code>来创建jar包,可如下操作:</p>
         <p>
           <code>$ mkdir wordcount_classes</code><br/>
           <code>
-            $ javac -classpath ${HADOOP_HOME}/hadoop-${HADOOP_VERSION}-core.jar 
+            $ javac -classpath ${HADOOP_PREFIX}/hadoop-${HADOOP_VERSION}-core.jar 
               -d wordcount_classes WordCount.java
           </code><br/>
           <code>$ jar -cvf /usr/joe/wordcount.jar -C wordcount_classes/ .</code> 

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/quickstart.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/quickstart.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/quickstart.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/quickstart.xml Fri May 27 16:35:02 2011
@@ -192,7 +192,7 @@
 
         <p>Hadoop守护进程的日志写入到 
         <code>${HADOOP_LOG_DIR}</code> 目录 (默认是 
-        <code>${HADOOP_HOME}/logs</code>).</p>
+        <code>${HADOOP_PREFIX}/logs</code>).</p>
 
         <p>浏览NameNode和JobTracker的网络接口,它们的地址默认为:</p>
         <ul>

Modified: hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/streaming.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/streaming.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/streaming.xml (original)
+++ hadoop/common/trunk/src/docs/cn/src/documentation/content/xdocs/streaming.xml Fri May 27 16:35:02 2011
@@ -36,7 +36,7 @@ Hadoop streaming是Hadoop的一
     这些特殊的map/reduce作业是由一些可执行文件或脚本文件充当mapper或者reducer。例如: 
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper /bin/cat \
@@ -68,7 +68,7 @@ Reducer任务运行时
 用户也可以使用java类作为mapper或者reducer。上面的例子与这里的代码等价:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.IdentityMapper \
@@ -88,7 +88,7 @@ $HADOOP_HOME/bin/hadoop  jar $HADOOP_HOM
 如果在集群上还没有,则需要用-file选项让framework把可执行文件作为作业的一部分,一起打包提交。例如:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper myPythonScript.py \
@@ -103,7 +103,7 @@ $HADOOP_HOME/bin/hadoop  jar $HADOOP_HOM
 除了可执行文件外,其他mapper或reducer需要用到的辅助文件(比如字典,配置文件等)也可以用这种方式打包上传。例如:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper myPythonScript.py \
@@ -173,7 +173,7 @@ $HADOOP_HOME/bin/hadoop  jar $HADOOP_HOM
 下面是使用-cacheArchive选项的另一个例子。其中,input.txt文件有两行内容,分别是两个文件的名字:testlink/cache.txt和testlink/cache2.txt。“testlink”是指向档案目录(jar文件解压后的目录)的符号链接,这个目录下有“cache.txt”和“cache2.txt”两个文件。
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
                   -input "/user/me/samples/cachefile/input.txt"  \
                   -mapper "xargs cat"  \
                   -reducer "cat"  \
@@ -220,7 +220,7 @@ This is just the second cache string
 用户可以使用“-jobconf  &lt;n&gt;=&lt;v&gt;”增加一些配置变量。例如:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.IdentityMapper\
@@ -254,7 +254,7 @@ Streaming 作业的其他
 <tr><td> -verbose </td><td> 可选 </td><td> 详细输出 </td></tr>
 </table>
 <p>
-使用-cluster &lt;name&gt;实现“本地”Hadoop和一个或多个远程Hadoop集群间切换。默认情况下,使用hadoop-default.xml和hadoop-site.xml;当使用-cluster &lt;name&gt;选项时,会使用$HADOOP_HOME/conf/hadoop-&lt;name&gt;.xml。
+使用-cluster &lt;name&gt;实现“本地”Hadoop和一个或多个远程Hadoop集群间切换。默认情况下,使用hadoop-default.xml和hadoop-site.xml;当使用-cluster &lt;name&gt;选项时,会使用$HADOOP_PREFIX/conf/hadoop-&lt;name&gt;.xml。
 </p>
 <p>
 下面的选项改变temp目录:
@@ -295,7 +295,7 @@ Streaming 作业的其他
 </p>
 
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.IdentityMapper \
@@ -320,7 +320,7 @@ Hadoop有一个工具
 切分是基于key值的前缀,而不是整个key。例如:
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.IdentityMapper \
@@ -390,7 +390,7 @@ combiner/reducer利用适当
 </p><p>
 要使用Aggregate,只需指定“-reducer aggregate”:</p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper myAggregatorForKeyCount.py \
@@ -434,7 +434,7 @@ Hadoop的工具类org.apache
 同样,工具类中的reduce函数也把输入的key/value对看作字段的列表,用户可以选取任意一段作为reduce输出的key或value。例如: 
 </p>
 <source>
-$HADOOP_HOME/bin/hadoop  jar $HADOOP_HOME/hadoop-streaming.jar \
+$HADOOP_PREFIX/bin/hadoop  jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input myInputDirs \
     -output myOutputDir \
     -mapper org.apache.hadoop.mapred.lib.FieldSelectionMapReduce\
@@ -523,7 +523,7 @@ bruce   70
 charlie 80
 dan     75
 
-$ c2='cut -f2'; $HADOOP_HOME/bin/hadoop jar $HADOOP_HOME/hadoop-streaming.jar \
+$ c2='cut -f2'; $HADOOP_PREFIX/bin/hadoop jar $HADOOP_PREFIX/hadoop-streaming.jar \
     -input /user/me/samples/student_marks 
     -mapper \"$c2\" -reducer 'cat'  
     -output /user/me/samples/student_out 

Modified: hadoop/common/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/common/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml Fri May 27 16:35:02 2011
@@ -64,8 +64,8 @@
       <code>TaskTracker</code>. These are the <em>slaves</em>.</p>
       
       <p>The root of the distribution is referred to as 
-      <code>HADOOP_HOME</code>. All machines in the cluster usually have the same 
-      <code>HADOOP_HOME</code> path.</p>
+      <code>HADOOP_PREFIX</code>. All machines in the cluster usually have the same 
+      <code>HADOOP_PREFIX</code> path.</p>
     </section>
     
     <section>
@@ -1084,7 +1084,7 @@
             on the cluster nodes where a configuration file for the setuid
             executable would be located. The executable would be built to
             <em>build.dir/dist.dir/bin</em> and should be installed to 
-            <em>$HADOOP_HOME/bin</em>.
+            <em>$HADOOP_PREFIX/bin</em>.
             </p>
             
             <p>
@@ -1274,7 +1274,7 @@
       
       <p>Once all the necessary configuration is complete, distribute the files
       to the <code>HADOOP_CONF_DIR</code> directory on all the machines, 
-      typically <code>${HADOOP_HOME}/conf</code>.</p>
+      typically <code>${HADOOP_PREFIX}/conf</code>.</p>
     </section>
     <section>
       <title>Cluster Restartability</title>

Modified: hadoop/common/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml (original)
+++ hadoop/common/trunk/src/docs/src/documentation/content/xdocs/commands_manual.xml Fri May 27 16:35:02 2011
@@ -40,7 +40,7 @@
 			
 			           <tr>
 			          	<td><code>--config confdir</code></td>
-			            <td>Overwrites the default Configuration directory. Default is ${HADOOP_HOME}/conf.</td>
+			            <td>Overwrites the default Configuration directory. Default is ${HADOOP_PREFIX}/conf.</td>
 			           </tr>
 			           <tr>
 			          	<td><code>GENERIC_OPTIONS</code></td>

Added: hadoop/common/trunk/src/docs/src/documentation/content/xdocs/deployment_layout.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/src/documentation/content/xdocs/deployment_layout.xml?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/docs/src/documentation/content/xdocs/deployment_layout.xml (added)
+++ hadoop/common/trunk/src/docs/src/documentation/content/xdocs/deployment_layout.xml Fri May 27 16:35:02 2011
@@ -0,0 +1,147 @@
+<?xml version="1.0"?>
+<!--
+  Copyright 2002-2004 The Apache Software Foundation
+
+  Licensed under the Apache License, Version 2.0 (the "License");
+  you may not use this file except in compliance with the License.
+  You may obtain a copy of the License at
+
+      http://www.apache.org/licenses/LICENSE-2.0
+
+  Unless required by applicable law or agreed to in writing, software
+  distributed under the License is distributed on an "AS IS" BASIS,
+  WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+  See the License for the specific language governing permissions and
+  limitations under the License.
+-->
+
+<!DOCTYPE document PUBLIC "-//APACHE//DTD Documentation V2.0//EN"
+          "http://forrest.apache.org/dtd/document-v20.dtd">
+
+
+<document>
+
+  <header>
+    <title> 
+      Hadoop Deployment Layout
+    </title>
+  </header>
+
+  <body>
+    <section>
+      <title> Introduction </title>
+      <p>
+        This document describes the standard deployment layout for Hadoop.  With the increased complexity of the evolving Hadoop ecosystem, having a standard deployment layout ensures better integration between Hadoop sub-projects.  By making the installation process easier, we can lower the barrier to entry and increase Hadoop adoption.
+      </p>
+    </section>
+
+    <section> 
+      <title> Packages </title>
+        <p>
+          We need to divide Hadoop up into packages that can be independently upgraded.  The list of packages should include:
+        </p>
+        <ul>
+          <li>Hadoop Common - the common code, including the native code and required jar files.</li>
+          <li>HDFS Client - HDFS jars, scripts, and shared libraries.</li>
+          <li>HDFS Server - jsvc executable</li>
+          <li>Yarn Client - Yarn client jars and scripts</li>
+          <li>Yarn Server - Yarn server jars and scripts</li>
+          <li>MapReduce - MapReduce jars, scripts, and shared libraries</li>
+          <li>LZO - LZO codec from github.com/omally/hadoop-gpl-compression</li>
+          <li>Metrics - Plugins for Chukwa and Ganglia</li>
+        </ul>
+        <p>Packages from other teams will include:</p>
+        <ul>
+          <li>Pig</li>
+          <li>Hive</li>
+          <li>Oozie client</li>
+          <li>Oozie server</li>
+          <li>Howl client</li>
+          <li>Howl server</li>
+        </ul>
+        <p>These packages should be deployable with RPM on RedHat.  We also need a package that depends on a version of each of these packages.  In general, we can generate tarballs in the new deployment layout.</p>
+        <p>Note that some packages, like Pig, which are user facing, will have two versions installed in a given deployment.  This will be accomplished by modifying the package name and the associated binaries to include the version number.</p>
+        <p>All of the following paths are based on a prefix directory that is the root of the installation.  Our packages must support having multiple Hadoop stack installations on a computer at the same time.  For RPMs, this means that the packages must be relocatable and honor the --prefix option.</p>
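For illustration only, a relocatable RPM built to this layout could be installed under an alternate root with rpm's --prefix option; the package file name and version below are hypothetical:

    # Install the (hypothetical) common package relocated from /usr to
    # /usr/local/hadoop; --prefix works only because the package is built
    # relocatable, as required above.
    rpm -i --prefix=/usr/local/hadoop hadoop-common-0.23.0-1.x86_64.rpm

    # List where the files actually landed:
    rpm -ql hadoop-common | head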
+     </section>
+
+ 
+      <section> 
+        <title> Deployment </title>
+        <p>It is important to have a standard deployment that results from installing the packages regardless of the package manager.  Here are the top level directories and a sample of what would be under each.  Note that all of the packages are installed "flattened" into the prefix directory.  For compatibility reasons, we should create "share/hadoop" that matches the old HADOOP_PREFIX and set the HADOOP_PREFIX variable to that.</p>
+        <source>
+        $PREFIX/ bin / hadoop
+               |     | mapred
+               |     | pig -> pig7
+               |     | pig6
+               |     + pig7
+               |
+               + etc / hadoop / core-site.xml
+               |              | hdfs-site.xml
+               |              + mapred-site.xml
+               |
+               + include / hadoop / Pipes.hh
+               |         |        + TemplateFactory.hh
+               |         + hdfs.h
+               |
+               + lib / jni / hadoop-common / libhadoop.so.0.20.0
+               |     |
+               |     | libhdfs.so -> libhdfs.so.0.20.0
+               |     + libhdfs.so.0.20.0
+               |
+               + libexec / task-controller
+               |
+               + man / man1 / hadoop.1
+               |            | mapred.1
+               |            | pig6.1
+               |            + pig7.1
+               |
+               + share / hadoop-common 
+               |       | hadoop-hdfs
+               |       | hadoop-mapreduce
+               |       | pig6
+               |       + pig7
+               |
+               + sbin / hdfs-admin
+               |      + mapred-admin
+               |
+               + src / hadoop-common
+               |     | hadoop-hdfs
+               |     + hadoop-mapreduce
+               |
+               + var / lib / data-node
+                     |     + task-tracker
+                     |
+                     | log / hadoop-datanode
+                     |     + hadoop-tasktracker
+                     |
+                     + run / hadoop-datanode.pid
+                           + hadoop-tasktracker.pid
+        </source>
+        <p>Note that we must continue to honor HADOOP_CONF_DIR to override the configuration location, but it should default to $prefix/etc.  User facing binaries and scripts go into bin.  Configuration files go into etc, with a subdirectory for tools that have multiple configuration files.  JNI shared libraries go into lib/jni/$tool, since Java does not allow specifying the version of the library to load.  Libraries that aren't loaded via System.loadLibrary are placed directly under lib.  64 bit versions of the libraries for platforms that support them should be placed in lib64.  All of the architecture-independent pieces, including the jars for each tool, will be placed in share/$tool.  The default location for all the runtime information will be in var.  The storage will be in var/lib, the logs in var/log, and the pid files in var/run.</p>
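As a small sketch of the HADOOP_CONF_DIR override described above (the alternate configuration path is illustrative):

    # By default the scripts read configuration from the etc directory
    # under the installation prefix.  Point them elsewhere for this shell:
    export HADOOP_CONF_DIR=/opt/hadoop-conf/cluster-a
    hadoop fs -ls /

    # Or override it for a single invocation with the --config option
    # documented in the commands manual:
    hadoop --config /opt/hadoop-conf/cluster-a fs -ls /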
+      </section>
+
+      <section> 
+        <title> Path Configurations </title>
+        <p>Paths can be configured at the compile (build) phase or at the installation phase.  For RPM, the --relocate directive allows paths to be reconfigured at install time.  For Debian packages, paths are configured at the compile phase.
+        </p>
+          <p>Build phase parameters (an illustrative build invocation follows the list):</p>
+          <ul>
+            <li>package.prefix - Location of package prefix (Default /usr)</li>
+            <li>package.conf.dir - Location of configuration directory (Default /etc/hadoop)</li>
+            <li>package.log.dir - Location of log directory (Default /var/log/hadoop)</li>
+            <li>package.pid.dir - Location of pid directory (Default /var/run/hadoop)</li>
+          </ul>
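An illustrative build invocation using these properties; the ant target name (rpm) is an assumption here, only the package.* property names come from the list above:

    # Hypothetical target name; the -D properties set the compiled-in paths.
    ant rpm -Dpackage.prefix=/usr \
            -Dpackage.conf.dir=/etc/hadoop \
            -Dpackage.log.dir=/var/log/hadoop \
            -Dpackage.pid.dir=/var/run/hadoop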
+
+          <p>Install phase parameters:</p>
+          <source>
+          rpm -i hadoop-[version]-[rev].[arch].rpm \
+              --relocate /usr=/usr/local/hadoop \
+              --relocate /etc/hadoop=/usr/local/etc/hadoop \
+              --relocate /var/log/hadoop=/opt/logs/hadoop \
+              --relocate /var/run/hadoop=/opt/run/hadoop
+          </source>
+      </section>
+
+  </body>
+</document>
+

Modified: hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml (original)
+++ hadoop/common/trunk/src/docs/src/documentation/content/xdocs/single_node_setup.xml Fri May 27 16:35:02 2011
@@ -229,7 +229,7 @@
 
         <p>The hadoop daemon log output is written to the 
         <code>${HADOOP_LOG_DIR}</code> directory (defaults to 
-        <code>${HADOOP_HOME}/logs</code>).</p>
+        <code>${HADOOP_PREFIX}/logs</code>).</p>
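As an illustration, the default log location can be overridden before the daemons are started (the path below is only an example):

    # e.g. in conf/hadoop-env.sh or the daemon's environment
    export HADOOP_LOG_DIR=/var/log/hadoop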
 
         <p>Browse the web interface for the NameNode and the JobTracker; by
         default they are available at:</p>

Modified: hadoop/common/trunk/src/docs/src/documentation/content/xdocs/site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/docs/src/documentation/content/xdocs/site.xml?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/docs/src/documentation/content/xdocs/site.xml (original)
+++ hadoop/common/trunk/src/docs/src/documentation/content/xdocs/site.xml Fri May 27 16:35:02 2011
@@ -39,6 +39,7 @@ See http://forrest.apache.org/docs/linki
   </docs>	
 		
  <docs label="Guides">
+		<deployment					label="Deployment Layout" href="deployment_layout.html" />
 		<commands_manual 				label="Hadoop Commands"  href="commands_manual.html" />
 		<fsshell				        label="File System Shell"               href="file_system_shell.html" />
 		<SLA					 	label="Service Level Authorization" 	href="service_level_auth.html"/>

Modified: hadoop/common/trunk/src/native/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/Makefile.am?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/Makefile.am (original)
+++ hadoop/common/trunk/src/native/Makefile.am Fri May 27 16:35:02 2011
@@ -18,7 +18,7 @@
 
 #
 # Notes: 
-# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os-arch}.
+# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os-arch}.
 # 2. This makefile depends on the following environment variables to function correctly:
 #    * HADOOP_NATIVE_SRCDIR 
 #    * JAVA_HOME

Modified: hadoop/common/trunk/src/native/lib/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/lib/Makefile.am?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/lib/Makefile.am (original)
+++ hadoop/common/trunk/src/native/lib/Makefile.am Fri May 27 16:35:02 2011
@@ -22,7 +22,7 @@
 
 #
 # Notes: 
-# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/lib 
+# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/lib 
 # 2. This makefile depends on the following environment variables to function correctly:
 #    * HADOOP_NATIVE_SRCDIR 
 #    * JAVA_HOME

Modified: hadoop/common/trunk/src/native/packageNativeHadoop.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/packageNativeHadoop.sh?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/packageNativeHadoop.sh (original)
+++ hadoop/common/trunk/src/native/packageNativeHadoop.sh Fri May 27 16:35:02 2011
@@ -35,14 +35,14 @@ if [ -d $BASE_NATIVE_LIB_DIR ]
 then
   for platform in `ls $BASE_NATIVE_LIB_DIR`
   do
-    if [ ! -d $DIST_LIB_DIR/$platform ]
+    if [ ! -d $DIST_LIB_DIR ]
     then
-      mkdir -p $DIST_LIB_DIR/$platform
-      echo "Created $DIST_LIB_DIR/$platform"
+      mkdir -p $DIST_LIB_DIR
+      echo "Created $DIST_LIB_DIR"
     fi
-    echo "Copying libraries in $BASE_NATIVE_LIB_DIR/$platform to $DIST_LIB_DIR/$platform/"
-    cd $BASE_NATIVE_LIB_DIR/$platform/
-    $TAR . | (cd $DIST_LIB_DIR/$platform/; $UNTAR)
+    echo "Copying libraries in $BASE_NATIVE_LIB_DIR/$platform to $DIST_LIB_DIR/"
+    cd $BASE_NATIVE_LIB_DIR/
+    $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
   done
 fi
 
@@ -51,14 +51,14 @@ if [ -d $BUILD_NATIVE_DIR ]
 then 
   for platform in `ls $BUILD_NATIVE_DIR`
   do
-    if [ ! -d $DIST_LIB_DIR/$platform ]
+    if [ ! -d $DIST_LIB_DIR ]
     then
-      mkdir -p $DIST_LIB_DIR/$platform
-      echo "Created $DIST_LIB_DIR/$platform"
+      mkdir -p $DIST_LIB_DIR
+      echo "Created $DIST_LIB_DIR"
     fi
-    echo "Copying libraries in $BUILD_NATIVE_DIR/$platform/lib to $DIST_LIB_DIR/$platform/"
+    echo "Copying libraries in $BUILD_NATIVE_DIR/$platform/lib to $DIST_LIB_DIR/"
     cd $BUILD_NATIVE_DIR/$platform/lib
-    $TAR . | (cd $DIST_LIB_DIR/$platform/; $UNTAR)
+    $TAR . | (cd $DIST_LIB_DIR/; $UNTAR)
   done  
 fi
 

Modified: hadoop/common/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am?rev=1128385&r1=1128384&r2=1128385&view=diff
==============================================================================
--- hadoop/common/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am (original)
+++ hadoop/common/trunk/src/native/src/org/apache/hadoop/io/compress/zlib/Makefile.am Fri May 27 16:35:02 2011
@@ -22,7 +22,7 @@
 
 #
 # Notes: 
-# 1. This makefile is designed to do the actual builds in $(HADOOP_HOME)/build/native/${os.name}-${os.arch}/$(subdir) .
+# 1. This makefile is designed to do the actual builds in $(HADOOP_PREFIX)/build/native/${os.name}-${os.arch}/$(subdir) .
 # 2. This makefile depends on the following environment variables to function correctly:
 #    * HADOOP_NATIVE_SRCDIR 
 #    * JAVA_HOME
@@ -31,7 +31,7 @@
 #    * PLATFORM
 #    All these are setup by build.xml and/or the top-level makefile.
 # 3. The creation of requisite jni headers/stubs are also done by build.xml and they are
-#    assumed to be in $(HADOOP_HOME)/build/native/src/org/apache/hadoop/io/compress/zlib.
+#    assumed to be in $(HADOOP_PREFIX)/build/native/src/org/apache/hadoop/io/compress/zlib.
 #
 
 # The 'vpath directive' to locate the actual source files 

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/conffile
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/conffile?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/conffile (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/conffile Fri May 27 16:35:02 2011
@@ -0,0 +1,15 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+/etc/hadoop

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/control
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/control?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/control (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/control Fri May 27 16:35:02 2011
@@ -0,0 +1,24 @@
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+Package: hadoop-common
+Version: @version@
+Section: misc
+Priority: optional
+Provides: hadoop-common
+Architecture: all
+Depends: openjdk-6-jre-headless
+Maintainer: Apache Software Foundation <ge...@hadoop.apache.org>
+Description: The Apache Hadoop project develops open-source software for reliable, scalable, distributed computing.
+Distribution: development

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/postinst
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/postinst?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/postinst (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/postinst Fri May 27 16:35:02 2011
@@ -0,0 +1,24 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bash /usr/sbin/update-hadoop-env.sh \
+  --prefix=/usr \
+  --bin-dir=/usr/bin \
+  --sbin-dir=/usr/sbin \
+  --conf-dir=/etc/hadoop \
+  --log-dir=/var/log/hadoop \
+  --pid-dir=/var/run/hadoop

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/postrm
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/postrm?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/postrm (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/postrm Fri May 27 16:35:02 2011
@@ -0,0 +1,19 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+/usr/sbin/groupdel hadoop 2>/dev/null >/dev/null
+exit 0

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/preinst
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/preinst?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/preinst (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/preinst Fri May 27 16:35:02 2011
@@ -0,0 +1,18 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+getent group hadoop 2>/dev/null >/dev/null || /usr/sbin/groupadd -r hadoop

Added: hadoop/common/trunk/src/packages/deb/hadoop.control/prerm
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/hadoop.control/prerm?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/hadoop.control/prerm (added)
+++ hadoop/common/trunk/src/packages/deb/hadoop.control/prerm Fri May 27 16:35:02 2011
@@ -0,0 +1,25 @@
+#!/bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+bash /usr/sbin/update-hadoop-env.sh \
+  --prefix=/usr \
+  --bin-dir=/usr/bin \
+  --sbin-dir=/usr/sbin \
+  --conf-dir=/etc/hadoop \
+  --log-dir=/var/log/hadoop \
+  --pid-dir=/var/run/hadoop \
+  --uninstall

Added: hadoop/common/trunk/src/packages/deb/init.d/hadoop-datanode
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/packages/deb/init.d/hadoop-datanode?rev=1128385&view=auto
==============================================================================
--- hadoop/common/trunk/src/packages/deb/init.d/hadoop-datanode (added)
+++ hadoop/common/trunk/src/packages/deb/init.d/hadoop-datanode Fri May 27 16:35:02 2011
@@ -0,0 +1,142 @@
+#! /bin/sh
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+### BEGIN INIT INFO
+# Provides:		hadoop-datanode	
+# Required-Start:	$remote_fs $syslog
+# Required-Stop:	$remote_fs $syslog
+# Default-Start:	2 3 4 5
+# Default-Stop:		
+# Short-Description:	Apache Hadoop Data Node server
+### END INIT INFO
+
+set -e
+
+# /etc/init.d/hadoop-datanode: start and stop the Apache Hadoop Data Node daemon
+
+test -x /usr/bin/hadoop || exit 0
+( /usr/bin/hadoop 2>&1 | grep -q hadoop ) 2>/dev/null || exit 0
+
+umask 022
+
+if test -f /etc/default/hadoop-env.sh; then
+    . /etc/default/hadoop-env.sh
+fi
+
+. /lib/lsb/init-functions
+
+# Are we running from init?
+run_by_init() {
+    ([ "$previous" ] && [ "$runlevel" ]) || [ "$runlevel" = S ]
+}
+
+check_for_no_start() {
+    # forget it if we're trying to start, and /etc/hadoop/hadoop-datanode_not_to_be_run exists
+    if [ -e /etc/hadoop/hadoop-datanode_not_to_be_run ]; then 
+	if [ "$1" = log_end_msg ]; then
+	    log_end_msg 0
+	fi
+	if ! run_by_init; then
+	    log_action_msg "Apache Hadoop Data Node server not in use (/etc/hadoop/hadoop-datanode_not_to_be_run)"
+	fi
+	exit 0
+    fi
+}
+
+check_privsep_dir() {
+    # Create the PrivSep empty dir if necessary
+    if [ ! -d ${HADOOP_PID_DIR} ]; then
+	mkdir -p ${HADOOP_PID_DIR}
+        chown root:hadoop ${HADOOP_PID_DIR}
+	chmod 0775 ${HADOOP_PID_DIR} 
+    fi
+}
+
+export PATH="${PATH:+$PATH:}/usr/sbin:/usr/bin"
+
+case "$1" in
+  start)
+	check_privsep_dir
+	check_for_no_start
+	log_daemon_msg "Starting Apache Hadoop Data Node server" "hadoop-datanode"
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+  stop)
+	log_daemon_msg "Stopping Apache Hadoop Data Node server" "hadoop-datanode"
+	if start-stop-daemon --stop --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
+	start-stop-daemon --stop --quiet --oknodo --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	check_for_no_start log_end_msg
+	if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+	    log_end_msg 0
+	else
+	    log_end_msg 1
+	fi
+	;;
+
+  try-restart)
+	check_privsep_dir
+	log_daemon_msg "Restarting Apache Hadoop Data Node server" "hadoop-datanode"
+	set +e
+	start-stop-daemon --stop --quiet --retry 30 --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid
+	RET="$?"
+	set -e
+	case $RET in
+	    0)
+		# old daemon stopped
+		check_for_no_start log_end_msg
+		if start-stop-daemon --start --quiet --oknodo --pidfile ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid -c hdfs -x ${HADOOP_PREFIX}/sbin/hadoop-daemon.sh -- --config ${HADOOP_CONF_DIR} start datanode; then
+		    log_end_msg 0
+		else
+		    log_end_msg 1
+		fi
+		;;
+	    1)
+		# daemon not running
+		log_progress_msg "(not running)"
+		log_end_msg 0
+		;;
+	    *)
+		# failed to stop
+		log_progress_msg "(failed to stop)"
+		log_end_msg 1
+		;;
+	esac
+	;;
+
+  status)
+	status_of_proc -p ${HADOOP_PID_DIR}/hadoop-hdfs-datanode.pid ${JAVA_HOME}/bin/java hadoop-datanode && exit 0 || exit $?
+	;;
+
+  *)
+	log_action_msg "Usage: /etc/init.d/hadoop-datanode {start|stop|restart|try-restart|status}"
+	exit 1
+esac
+
+exit 0