You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by to...@apache.org on 2010/06/11 01:01:34 UTC
svn commit: r953490 - in /hadoop/common/trunk: ./ bin/ conf/
Author: tomwhite
Date: Thu Jun 10 23:01:33 2010
New Revision: 953490
URL: http://svn.apache.org/viewvc?rev=953490&view=rev
Log:
HADOOP-6794. Move configuration and script files post split. Includes HDFS-1181, MAPREDUCE-1033.
Removed:
hadoop/common/trunk/bin/hdfs
hadoop/common/trunk/bin/hdfs-config.sh
hadoop/common/trunk/bin/mapred
hadoop/common/trunk/bin/mapred-config.sh
hadoop/common/trunk/bin/start-balancer.sh
hadoop/common/trunk/bin/start-dfs.sh
hadoop/common/trunk/bin/start-mapred.sh
hadoop/common/trunk/bin/stop-balancer.sh
hadoop/common/trunk/bin/stop-dfs.sh
hadoop/common/trunk/bin/stop-mapred.sh
hadoop/common/trunk/conf/capacity-scheduler.xml.template
hadoop/common/trunk/conf/hdfs-site.xml.template
hadoop/common/trunk/conf/mapred-queue-acls.xml.template
hadoop/common/trunk/conf/mapred-site.xml.template
hadoop/common/trunk/conf/taskcontroller.cfg
Modified:
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/bin/hadoop
hadoop/common/trunk/bin/hadoop-config.sh
hadoop/common/trunk/bin/rcc
hadoop/common/trunk/conf/hadoop-policy.xml.template
hadoop/common/trunk/conf/log4j.properties
Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Thu Jun 10 23:01:33 2010
@@ -935,6 +935,8 @@ Release 0.21.0 - Unreleased
HADOOP-6738. Move cluster_setup.xml from MapReduce to Common.
(Tom White via tomwhite)
+ HADOOP-6794. Move configuration and script files post split. (tomwhite)
+
OPTIMIZATIONS
HADOOP-5595. NameNode does not need to run a replicator to choose a
Modified: hadoop/common/trunk/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop (original)
+++ hadoop/common/trunk/bin/hadoop Thu Jun 10 23:01:33 2010
@@ -53,7 +53,9 @@ case $COMMAND in
echo ""
#try to locate hdfs and if present, delegate to it.
if [ -f "${HADOOP_HDFS_HOME}"/bin/hdfs ]; then
- exec "${HADOOP_HDFS_HOME}"/bin/hdfs $*
+ exec "${HADOOP_HDFS_HOME}"/bin/hdfs $*
+ elif [ -f "${HADOOP_HOME}"/bin/hdfs ]; then
+ exec "${HADOOP_HOME}"/bin/hdfs $*
else
echo "HDFS not found."
exit
@@ -67,7 +69,9 @@ case $COMMAND in
echo ""
#try to locate mapred and if present, delegate to it.
if [ -f "${HADOOP_MAPRED_HOME}"/bin/mapred ]; then
- exec "${HADOOP_MAPRED_HOME}"/bin/mapred $*
+ exec "${HADOOP_MAPRED_HOME}"/bin/mapred $*
+ elif [ -f "${HADOOP_HOME}"/bin/mapred ]; then
+ exec "${HADOOP_HOME}"/bin/mapred $*
else
echo "MAPRED not found."
exit
Modified: hadoop/common/trunk/bin/hadoop-config.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/hadoop-config.sh?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/bin/hadoop-config.sh (original)
+++ hadoop/common/trunk/bin/hadoop-config.sh Thu Jun 10 23:01:33 2010
@@ -31,17 +31,17 @@ while [ -h "$this" ]; do
done
# convert relative path to absolute path
-bin=`dirname "$this"`
+common_bin=`dirname "$this"`
script=`basename "$this"`
-bin=`cd "$bin"; pwd`
-this="$bin/$script"
+common_bin=`cd "$common_bin"; pwd`
+this="$common_bin/$script"
# the root of the Hadoop installation
#TODO: change the env variable when dir structure is changed
export HADOOP_HOME=`dirname "$this"`/..
-export HADOOP_CORE_HOME="${HADOOP_HOME}"
+export HADOOP_COMMON_HOME="${HADOOP_HOME}"
#export HADOOP_HOME=`dirname "$this"`/../..
-#export HADOOP_CORE_HOME="${HADOOP_CORE_HOME:-`dirname "$this"`/..}"
+#export HADOOP_COMMON_HOME="${HADOOP_COMMON_HOME:-`dirname "$this"`/..}"
#check to see if the conf dir is given as an optional argument
if [ $# -gt 1 ]
@@ -106,54 +106,54 @@ CLASSPATH="${HADOOP_CONF_DIR}"
CLASSPATH=${CLASSPATH}:$JAVA_HOME/lib/tools.jar
# for developers, add Hadoop classes to CLASSPATH
-if [ -d "$HADOOP_CORE_HOME/build/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/classes
+if [ -d "$HADOOP_COMMON_HOME/build/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/classes
fi
-if [ -d "$HADOOP_CORE_HOME/build/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build
+if [ -d "$HADOOP_COMMON_HOME/build/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build
fi
-if [ -d "$HADOOP_CORE_HOME/build/test/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/test/classes
+if [ -d "$HADOOP_COMMON_HOME/build/test/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/test/classes
fi
-if [ -d "$HADOOP_CORE_HOME/build/test/core/classes" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME/build/test/core/classes
+if [ -d "$HADOOP_COMMON_HOME/build/test/core/classes" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME/build/test/core/classes
fi
# so that filenames w/ spaces are handled correctly in loops below
IFS=
# for releases, add core hadoop jar & webapps to CLASSPATH
-if [ -d "$HADOOP_CORE_HOME/webapps" ]; then
- CLASSPATH=${CLASSPATH}:$HADOOP_CORE_HOME
+if [ -d "$HADOOP_COMMON_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_COMMON_HOME
fi
-for f in $HADOOP_CORE_HOME/hadoop-core-*.jar; do
+for f in $HADOOP_COMMON_HOME/hadoop-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
# add libs to CLASSPATH
-for f in $HADOOP_CORE_HOME/lib/*.jar; do
+for f in $HADOOP_COMMON_HOME/lib/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
-if [ -d "$HADOOP_CORE_HOME/build/ivy/lib/Hadoop-Core/common" ]; then
-for f in $HADOOP_CORE_HOME/build/ivy/lib/Hadoop-Core/common/*.jar; do
+if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Common/common" ]; then
+for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Common/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
-if [ -d "$HADOOP_CORE_HOME/build/ivy/lib/Hadoop-Hdfs/common" ]; then
-for f in $HADOOP_CORE_HOME/build/ivy/lib/Hadoop-Hdfs/common/*.jar; do
+if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Hdfs/common" ]; then
+for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop-Hdfs/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
-if [ -d "$HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common" ]; then
-for f in $HADOOP_CORE_HOME/build/ivy/lib/Hadoop/common/*.jar; do
+if [ -d "$HADOOP_COMMON_HOME/build/ivy/lib/Hadoop/common" ]; then
+for f in $HADOOP_COMMON_HOME/build/ivy/lib/Hadoop/common/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
fi
-for f in $HADOOP_CORE_HOME/lib/jsp-2.1/*.jar; do
+for f in $HADOOP_COMMON_HOME/lib/jsp-2.1/*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
@@ -180,23 +180,23 @@ unset IFS
# cygwin path translation
if $cygwin; then
- HADOOP_CORE_HOME=`cygpath -w "$HADOOP_CORE_HOME"`
+ HADOOP_COMMON_HOME=`cygpath -w "$HADOOP_COMMON_HOME"`
HADOOP_LOG_DIR=`cygpath -w "$HADOOP_LOG_DIR"`
fi
# setup 'java.library.path' for native-hadoop code if necessary
JAVA_LIBRARY_PATH=''
-if [ -d "${HADOOP_CORE_HOME}/build/native" -o -d "${HADOOP_CORE_HOME}/lib/native" ]; then
+if [ -d "${HADOOP_COMMON_HOME}/build/native" -o -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
JAVA_PLATFORM=`CLASSPATH=${CLASSPATH} ${JAVA} -Xmx32m ${HADOOP_JAVA_PLATFORM_OPTS} org.apache.hadoop.util.PlatformName | sed -e "s/ /_/g"`
- if [ -d "$HADOOP_CORE_HOME/build/native" ]; then
- JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/build/native/${JAVA_PLATFORM}/lib
+ if [ -d "$HADOOP_COMMON_HOME/build/native" ]; then
+ JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/build/native/${JAVA_PLATFORM}/lib
fi
- if [ -d "${HADOOP_CORE_HOME}/lib/native" ]; then
+ if [ -d "${HADOOP_COMMON_HOME}/lib/native" ]; then
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
- JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+ JAVA_LIBRARY_PATH=${JAVA_LIBRARY_PATH}:${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
else
- JAVA_LIBRARY_PATH=${HADOOP_CORE_HOME}/lib/native/${JAVA_PLATFORM}
+ JAVA_LIBRARY_PATH=${HADOOP_COMMON_HOME}/lib/native/${JAVA_PLATFORM}
fi
fi
fi
@@ -208,7 +208,7 @@ fi
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.file=$HADOOP_LOGFILE"
-HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_CORE_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_COMMON_HOME"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.root.logger=${HADOOP_ROOT_LOGGER:-INFO,console}"
if [ "x$JAVA_LIBRARY_PATH" != "x" ]; then
@@ -225,6 +225,15 @@ if [ "$HADOOP_HDFS_HOME" = "" ]; then
fi
if [ -d "${HADOOP_HDFS_HOME}" ]; then
+
+ if [ -d "$HADOOP_HDFS_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME
+ fi
+
+ if [ -d "${HADOOP_HDFS_HOME}/conf" ]; then
+ CLASSPATH=${CLASSPATH}:${HADOOP_HDFS_HOME}/conf
+ fi
+
for f in $HADOOP_HDFS_HOME/hadoop-hdfs-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
@@ -253,6 +262,15 @@ if [ "$HADOOP_MAPRED_HOME" = "" ]; then
fi
if [ -d "${HADOOP_MAPRED_HOME}" ]; then
+
+ if [ -d "$HADOOP_MAPRED_HOME/webapps" ]; then
+ CLASSPATH=${CLASSPATH}:$HADOOP_MAPRED_HOME
+ fi
+
+ if [ -d "${HADOOP_MAPRED_HOME}/conf" ]; then
+ CLASSPATH=${CLASSPATH}:${HADOOP_MAPRED_HOME}/conf
+ fi
+
for f in $HADOOP_MAPRED_HOME/hadoop-mapred-*.jar; do
CLASSPATH=${CLASSPATH}:$f
done
@@ -284,6 +302,3 @@ if $cygwin; then
fi
-# TODO:remove this when dir structure is changed
-#export HADOOP_HDFS_HOME=$HADOOP_HOME
-#export HADOOP_MAPRED_HOME=$HADOOP_HOME
Modified: hadoop/common/trunk/bin/rcc
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/bin/rcc?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/bin/rcc (original)
+++ hadoop/common/trunk/bin/rcc Thu Jun 10 23:01:33 2010
@@ -72,7 +72,7 @@ IFS=
if [ -d "$HADOOP_HOME/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_HOME
fi
-for f in $HADOOP_HOME/hadoop-core-*.jar; do
+for f in $HADOOP_HOME/hadoop-*.jar; do
CLASSPATH=${CLASSPATH}:$f;
done
Modified: hadoop/common/trunk/conf/hadoop-policy.xml.template
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/conf/hadoop-policy.xml.template?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/conf/hadoop-policy.xml.template (original)
+++ hadoop/common/trunk/conf/hadoop-policy.xml.template Thu Jun 10 23:01:33 2010
@@ -94,4 +94,13 @@
A special value of "*" means all users are allowed.</description>
</property>
+ <property>
+ <name>security.admin.operations.protocol.acl</name>
+ <value>*</value>
+ <description>ACL for AdminOperationsProtocol, used by the mradmin commands
+ to refresh queues and nodes at JobTracker. The ACL is a comma-separated list of
+ user and group names. The user and group list is separated by a blank.
+ For example, "alice,bob users,wheel". A special value of "*" means all users are
+ allowed.</description>
+ </property>
</configuration>
Modified: hadoop/common/trunk/conf/log4j.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/conf/log4j.properties?rev=953490&r1=953489&r2=953490&view=diff
==============================================================================
--- hadoop/common/trunk/conf/log4j.properties (original)
+++ hadoop/common/trunk/conf/log4j.properties Thu Jun 10 23:01:33 2010
@@ -3,6 +3,16 @@ hadoop.root.logger=INFO,console
hadoop.log.dir=.
hadoop.log.file=hadoop.log
+#
+# Job Summary Appender
+#
+# Use following logger to send summary to separate file defined by
+# hadoop.mapreduce.jobsummary.log.file rolled daily:
+# hadoop.mapreduce.jobsummary.logger=INFO,JSA
+#
+hadoop.mapreduce.jobsummary.logger=${hadoop.root.logger}
+hadoop.mapreduce.jobsummary.log.file=hadoop-mapreduce.jobsummary.log
+
# Define the root logger to the system property "hadoop.root.logger".
log4j.rootLogger=${hadoop.root.logger}, EventCounter
@@ -45,6 +55,7 @@ log4j.appender.console.layout.Conversion
#Default values
hadoop.tasklog.taskid=null
+hadoop.tasklog.iscleanup=false
hadoop.tasklog.noKeepSplits=4
hadoop.tasklog.totalLogFileSize=100
hadoop.tasklog.purgeLogSplits=true
@@ -52,12 +63,12 @@ hadoop.tasklog.logsRetainHours=12
log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
+log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
-
#
#Security appender
#
@@ -105,3 +116,34 @@ log4j.logger.org.jets3t.service.impl.res
# Sends counts of logging messages at different severity levels to Hadoop Metrics.
#
log4j.appender.EventCounter=org.apache.hadoop.metrics.jvm.EventCounter
+
+#
+# Job Summary Appender
+#
+log4j.appender.JSA=org.apache.log4j.DailyRollingFileAppender
+log4j.appender.JSA.File=${hadoop.log.dir}/${hadoop.mapreduce.jobsummary.log.file}
+log4j.appender.JSA.layout=org.apache.log4j.PatternLayout
+log4j.appender.JSA.layout.ConversionPattern=%d{yy/MM/dd HH:mm:ss} %p %c{2}: %m%n
+log4j.appender.JSA.DatePattern=.yyyy-MM-dd
+log4j.logger.org.apache.hadoop.mapred.JobInProgress$JobSummary=${hadoop.mapreduce.jobsummary.logger}
+log4j.additivity.org.apache.hadoop.mapred.JobInProgress$JobSummary=false
+
+#
+# MapReduce Audit Log Appender
+#
+
+# Set the MapReduce audit log filename
+#hadoop.mapreduce.audit.log.file=hadoop-mapreduce.audit.log
+
+# Appender for AuditLogger.
+# Requires the following system properties to be set
+# - hadoop.log.dir (Hadoop Log directory)
+# - hadoop.mapreduce.audit.log.file (MapReduce audit log filename)
+
+#log4j.logger.org.apache.hadoop.mapred.AuditLogger=INFO,MRAUDIT
+#log4j.additivity.org.apache.hadoop.mapred.AuditLogger=false
+#log4j.appender.MRAUDIT=org.apache.log4j.DailyRollingFileAppender
+#log4j.appender.MRAUDIT.File=${hadoop.log.dir}/${hadoop.mapreduce.audit.log.file}
+#log4j.appender.MRAUDIT.DatePattern=.yyyy-MM-dd
+#log4j.appender.MRAUDIT.layout=org.apache.log4j.PatternLayout
+#log4j.appender.MRAUDIT.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n