You are viewing a plain text version of this content. The canonical link for it is here.
Posted to hdfs-commits@hadoop.apache.org by el...@apache.org on 2011/06/12 05:08:52 UTC
svn commit: r1134856 - in /hadoop/hdfs/trunk: CHANGES.txt bin/hdfs build.xml
src/c++/libhdfs/tests/test-libhdfs.sh
Author: eli
Date: Sun Jun 12 03:08:52 2011
New Revision: 1134856
URL: http://svn.apache.org/viewvc?rev=1134856&view=rev
Log:
HDFS-2063. libhdfs test is broken. Contributed by Eric Yang
Modified:
hadoop/hdfs/trunk/CHANGES.txt
hadoop/hdfs/trunk/bin/hdfs
hadoop/hdfs/trunk/build.xml
hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh
Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=1134856&r1=1134855&r2=1134856&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Sun Jun 12 03:08:52 2011
@@ -724,6 +724,8 @@ Trunk (unreleased changes)
HDFS-2041. OP_CONCAT_DELETE doesn't properly restore modification time
of the concatenated file when edit logs are replayed. (todd)
+ HDFS-2063. libhdfs test is broken. (Eric Yang via eli)
+
Release 0.22.0 - Unreleased
INCOMPATIBLE CHANGES
Modified: hadoop/hdfs/trunk/bin/hdfs
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/bin/hdfs?rev=1134856&r1=1134855&r2=1134856&view=diff
==============================================================================
--- hadoop/hdfs/trunk/bin/hdfs (original)
+++ hadoop/hdfs/trunk/bin/hdfs Sun Jun 12 03:08:52 2011
@@ -125,6 +125,18 @@ if [ -d "$HADOOP_HDFS_HOME/build/tools"
CLASSPATH=${CLASSPATH}:$HADOOP_HDFS_HOME/build/tools
fi
+if [ -d "$HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/common" ]; then
+ for f in $HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/common/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+ done
+fi
+
+if [ -d "$HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/hdfs" ]; then
+ for f in $HADOOP_HDFS_HOME/build/ivy/lib/hadoop-hdfs/hdfs/*.jar; do
+ CLASSPATH=${CLASSPATH}:$f;
+ done
+fi
+
# for releases, add core hdfs jar & webapps to CLASSPATH
if [ -d "$HADOOP_PREFIX/share/hadoop/hdfs/webapps" ]; then
CLASSPATH=${CLASSPATH}:$HADOOP_PREFIX/share/hadoop/hdfs
Modified: hadoop/hdfs/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/build.xml?rev=1134856&r1=1134855&r2=1134856&view=diff
==============================================================================
--- hadoop/hdfs/trunk/build.xml (original)
+++ hadoop/hdfs/trunk/build.xml Sun Jun 12 03:08:52 2011
@@ -1459,6 +1459,7 @@
<env key="JVM_ARCH" value="${jvm.arch}"/>
<env key="LIBHDFS_BUILD_DIR" value="${build.c++.libhdfs}"/>
<env key="HADOOP_PREFIX" value="${basedir}"/>
+ <env key="HADOOP_HDFS_HOME" value="${basedir}"/>
<env key="HADOOP_CONF_DIR" value="${test.libhdfs.dir}/conf"/>
<env key="HADOOP_LOG_DIR" value="${test.libhdfs.dir}/logs"/>
<env key="LIBHDFS_TEST_DIR" value="${test.libhdfs.dir}"/>
@@ -1466,6 +1467,7 @@
<env key="LIBHDFS_INSTALL_DIR" value="${install.c++}/lib"/>
<env key="LIB_DIR" value="${common.ivy.lib.dir}"/>
<env key="CLOVER_JAR" value="${clover.jar}"/>
+ <env key="HADOOP_VERSION" value="${version}"/>
<arg value="test"/>
</exec>
</target>
Modified: hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/c%2B%2B/libhdfs/tests/test-libhdfs.sh?rev=1134856&r1=1134855&r2=1134856&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh (original)
+++ hadoop/hdfs/trunk/src/c++/libhdfs/tests/test-libhdfs.sh Sun Jun 12 03:08:52 2011
@@ -26,6 +26,8 @@
# f) LIBHDFS_INSTALL_DIR
# g) OS_NAME
# h) CLOVER_JAR
+# i) HADOOP_VERSION
+# j) HADOOP_HDFS_HOME
# All these are passed by build.xml.
#
@@ -33,8 +35,8 @@ HDFS_TEST=hdfs_test
HADOOP_LIB_DIR=$HADOOP_PREFIX/lib
HADOOP_BIN_DIR=$HADOOP_PREFIX/bin
-COMMON_BUILD_DIR=$HADOOP_PREFIX/build/ivy/lib/Hadoop-Hdfs/common
-COMMON_JAR=$COMMON_BUILD_DIR/hadoop-common-0.22.0-SNAPSHOT.jar
+COMMON_BUILD_DIR=$HADOOP_PREFIX/build/ivy/lib/hadoop-hdfs/common
+COMMON_JAR=$COMMON_BUILD_DIR/hadoop-common-$HADOOP_VERSION.jar
cat > $HADOOP_CONF_DIR/core-site.xml <<EOF
<?xml version="1.0"?>