You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:19:11 UTC
svn commit: r1077478 - in
/hadoop/common/branches/branch-0.20-security-patches/bin: hadoop
hadoop-daemon.sh
Author: omalley
Date: Fri Mar 4 04:19:11 2011
New Revision: 1077478
URL: http://svn.apache.org/viewvc?rev=1077478&view=rev
Log:
commit fa353840f08503d08008d9b9e9e946b6a00bbc18
Author: Devaraj Das <dd...@yahoo-inc.com>
Date: Thu May 20 15:29:58 2010 -0700
HDFS-1150 from https://issues.apache.org/jira/secure/attachment/12445111/HDFS-1150-BF-Y20-LOG-DIRS-2.patch
+++ b/YAHOO-CHANGES.txt
+ HDFS-1150. Fixes the earlier patch to do logging in the right directory
+ and also adds facility for monitoring processes (via -Dprocname in the
+ command line). (Jakob Homan via ddas)
+
Modified:
hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
hadoop/common/branches/branch-0.20-security-patches/bin/hadoop-daemon.sh
Modified: hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/hadoop?rev=1077478&r1=1077477&r2=1077478&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/hadoop (original)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/hadoop Fri Mar 4 04:19:11 2011
@@ -91,6 +91,14 @@ if [ -f "${HADOOP_CONF_DIR}/hadoop-env.s
. "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi
+# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
+if [ "$COMMAND" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
+ HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
+ HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
+ HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
+ starting_secure_dn="true"
+fi
+
# some Java parameters
if [ "$JAVA_HOME" != "" ]; then
#echo "run java in $JAVA_HOME"
@@ -304,28 +312,22 @@ fi
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.policy.file=$HADOOP_POLICYFILE"
# Check to see if we should start a secure datanode
-if [ "$COMMAND" = "datanode" ]; then
- if [[ $EUID -eq 0 ]]; then
- if [ "$HADOOP_SECURE_DN_USER" = "" ]; then
- HADOOP_SECURE_DN_USER="hdfs"
- fi
-
- if [ "$HADOOP_PID_DIR" = "" ]; then
- HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
- else
- HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
- fi
-
- exec "jsvc" -outfile "$HADOOP_LOG_DIR/jsvc.out" \
- -errfile "$HADOOP_LOG_DIR/jsvc.err" \
- -pidfile "$HADOOP_SECURE_DN_PID" \
- -nodetach \
- -user "$HADOOP_SECURE_DN_USER" \
- -cp "$CLASSPATH" \
- $JAVA_HEAP_MAX $HADOOP_OPTS \
- org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
+if [ "$starting_secure_dn" = "true" ]; then
+ if [ "$HADOOP_PID_DIR" = "" ]; then
+ HADOOP_SECURE_DN_PID="/tmp/hadoop_secure_dn.pid"
+ else
+ HADOOP_SECURE_DN_PID="$HADOOP_PID_DIR/hadoop_secure_dn.pid"
fi
+
+ exec "$HADOOP_HOME/bin/jsvc" -Dproc_$COMMAND -outfile "$HADOOP_LOG_DIR/jsvc.out" \
+ -errfile "$HADOOP_LOG_DIR/jsvc.err" \
+ -pidfile "$HADOOP_SECURE_DN_PID" \
+ -nodetach \
+ -user "$HADOOP_SECURE_DN_USER" \
+ -cp "$CLASSPATH" \
+ $JAVA_HEAP_MAX $HADOOP_OPTS \
+ org.apache.hadoop.hdfs.server.datanode.SecureDataNodeStarter "$@"
else
# run it
- exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
+ exec "$JAVA" -Dproc_$COMMAND $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
fi
Modified: hadoop/common/branches/branch-0.20-security-patches/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/hadoop-daemon.sh?rev=1077478&r1=1077477&r2=1077478&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/hadoop-daemon.sh (original)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/hadoop-daemon.sh Fri Mar 4 04:19:11 2011
@@ -68,20 +68,28 @@ if [ -f "${HADOOP_CONF_DIR}/hadoop-env.s
. "${HADOOP_CONF_DIR}/hadoop-env.sh"
fi
+# Determine if we're starting a secure datanode, and if so, redefine appropriate variables
+if [ "$command" == "datanode" ] && [ "$EUID" -eq 0 ] && [ -n "$HADOOP_SECURE_DN_USER" ]; then
+ export HADOOP_PID_DIR=$HADOOP_SECURE_DN_PID_DIR
+ export HADOOP_LOG_DIR=$HADOOP_SECURE_DN_LOG_DIR
+ export HADOOP_IDENT_STRING=$HADOOP_SECURE_DN_USER
+fi
+
+if [ "$HADOOP_IDENT_STRING" = "" ]; then
+ export HADOOP_IDENT_STRING="$USER"
+fi
+
# get log directory
if [ "$HADOOP_LOG_DIR" = "" ]; then
export HADOOP_LOG_DIR="$HADOOP_HOME/logs"
fi
mkdir -p "$HADOOP_LOG_DIR"
+chown $HADOOP_IDENT_STRING $HADOOP_LOG_DIR
if [ "$HADOOP_PID_DIR" = "" ]; then
HADOOP_PID_DIR=/tmp
fi
-if [ "$HADOOP_IDENT_STRING" = "" ]; then
- export HADOOP_IDENT_STRING="$USER"
-fi
-
# some variables
export HADOOP_LOGFILE=hadoop-$HADOOP_IDENT_STRING-$command-$HOSTNAME.log
export HADOOP_ROOT_LOGGER="INFO,DRFA"