You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by cu...@apache.org on 2006/05/12 22:13:20 UTC
svn commit: r405853 - in /lucene/hadoop/trunk: CHANGES.txt bin/hadoop
bin/hadoop-daemon.sh src/java/org/apache/hadoop/util/LogFormatter.java
src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
Author: cutting
Date: Fri May 12 13:13:19 2006
New Revision: 405853
URL: http://svn.apache.org/viewcvs?rev=405853&view=rev
Log:
HADOOP-207, Fix JDK 1.4-incompatible use of System.getenv().
Modified:
lucene/hadoop/trunk/CHANGES.txt
lucene/hadoop/trunk/bin/hadoop
lucene/hadoop/trunk/bin/hadoop-daemon.sh
lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java
lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
Modified: lucene/hadoop/trunk/CHANGES.txt
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/CHANGES.txt?rev=405853&r1=405852&r2=405853&view=diff
==============================================================================
--- lucene/hadoop/trunk/CHANGES.txt (original)
+++ lucene/hadoop/trunk/CHANGES.txt Fri May 12 13:13:19 2006
@@ -8,6 +8,9 @@
2. HADOOP-201. Fix 'bin/hadoop dfs -report'. (cutting)
+ 3. HADOOP-207. Fix JDK 1.4 incompatibility introduced by HADOOP-96.
+ System.getenv() does not work in JDK 1.4. (Hairong Kuang via cutting)
+
Release 0.2.0 - 2006-05-05
Modified: lucene/hadoop/trunk/bin/hadoop
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop?rev=405853&r1=405852&r2=405853&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop (original)
+++ lucene/hadoop/trunk/bin/hadoop Fri May 12 13:13:19 2006
@@ -143,9 +143,13 @@
# cygwin path translation
if expr `uname` : 'CYGWIN*' > /dev/null; then
CLASSPATH=`cygpath -p -w "$CLASSPATH"`
- HADOOP_HOME=`cygpath -p -w "$HADOOP_HOME"`
- HADOOP_LOG_DIR=`cygpath -p -w "$HADOOP_LOG_DIR"`
+ HADOOP_HOME=`cygpath -d "$HADOOP_HOME"`
+ HADOOP_LOG_DIR=`cygpath -d "$HADOOP_LOG_DIR"`
fi
+
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.log.dir=$HADOOP_LOG_DIR"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.home.dir=$HADOOP_HOME"
+HADOOP_OPTS="$HADOOP_OPTS -Dhadoop.id.str=$HADOOP_IDENT_STRING"
# run it
exec "$JAVA" $JAVA_HEAP_MAX $HADOOP_OPTS -classpath "$CLASSPATH" $CLASS "$@"
Modified: lucene/hadoop/trunk/bin/hadoop-daemon.sh
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/bin/hadoop-daemon.sh?rev=405853&r1=405852&r2=405853&view=diff
==============================================================================
--- lucene/hadoop/trunk/bin/hadoop-daemon.sh (original)
+++ lucene/hadoop/trunk/bin/hadoop-daemon.sh Fri May 12 13:13:19 2006
@@ -58,7 +58,7 @@
fi
if [ "$HADOOP_IDENT_STRING" = "" ]; then
- HADOOP_IDENT_STRING=$USER
+ export HADOOP_IDENT_STRING="$USER"
fi
# some variables
Modified: lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java?rev=405853&r1=405852&r2=405853&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java (original)
+++ lucene/hadoop/trunk/src/java/org/apache/hadoop/util/LogFormatter.java Fri May 12 13:13:19 2006
@@ -49,10 +49,10 @@
public static String initFileHandler( Configuration conf, String opName )
throws IOException {
- String logDir=System.getenv("HADOOP_LOG_DIR");
+ String logDir=System.getProperty("hadoop.log.dir");
String userHome=System.getProperty("user.dir");
if( logDir==null ) {
- logDir=System.getenv("HADOOP_HOME");
+ logDir=System.getProperty("hadoop.home.dir");
if(logDir==null) {
logDir=userHome;
} else {
@@ -82,7 +82,9 @@
hostname="localhost";
}
- String logFile = logDir+File.separator+"hadoop-"+System.getProperty( "user.name" )
+ String id = System.getProperty( "hadoop.id.str",
+ System.getProperty("user.name") );
+ String logFile = logDir+File.separator+"hadoop-"+id
+"-"+opName+"-"+hostname+".log";
int logFileSize = conf.getInt( "hadoop.logfile.size", 10000000 );
Modified: lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java
URL: http://svn.apache.org/viewcvs/lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java?rev=405853&r1=405852&r2=405853&view=diff
==============================================================================
--- lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java (original)
+++ lucene/hadoop/trunk/src/test/org/apache/hadoop/dfs/ClusterTestDFSNamespaceLogging.java Fri May 12 13:13:19 2006
@@ -334,7 +334,7 @@
conf.setLong("dfs.blockreport.intervalMsec", 50*1000L);
conf.setLong("dfs.datanode.startupMsec", 15*1000L);
conf.setInt("dfs.replication", 2);
- //System.setProperty("HADOOP_LOG_DIR", baseDirSpecified+"/logs");
+ System.setProperty("hadoop.log.dir", baseDirSpecified+"/logs");
conf.setInt("hadoop.logfile.count", 1);
conf.setInt("hadoop.logfile.size", 1000000000);