You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by dd...@apache.org on 2009/05/08 14:26:26 UTC
svn commit: r772956 [2/2] - in /hadoop/core/trunk: ./ src/contrib/
src/contrib/hdfsproxy/ src/test/core/ src/test/core/org/
src/test/core/org/apache/ src/test/core/org/apache/hadoop/
src/test/core/org/apache/hadoop/cli/ src/test/core/org/apache/hadoop/...
Modified: hadoop/core/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/build.xml?rev=772956&r1=772955&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/build.xml (original)
+++ hadoop/core/trunk/build.xml Fri May 8 12:26:11 2009
@@ -96,8 +96,8 @@
<property name="test.debug.data" value="${test.build.dir}/debug"/>
<property name="test.log.dir" value="${test.build.dir}/logs"/>
<property name="test.build.classes" value="${test.build.dir}/classes"/>
- <property name="test.build.testjar" value="${test.build.dir}/testjar"/>
- <property name="test.build.testshell" value="${test.build.dir}/testshell"/>
+ <property name="test.mapred.build.testjar" value="${test.build.dir}/mapred/testjar"/>
+ <property name="test.mapred.build.testshell" value="${test.build.dir}/mapred/testshell"/>
<property name="test.build.extraconf" value="${test.build.dir}/extraconf"/>
<property name="test.build.javadoc" value="${test.build.dir}/docs/api"/>
<property name="test.build.javadoc.dev" value="${test.build.dir}/docs/dev-api"/>
@@ -111,6 +111,15 @@
<property name="test.junit.haltonfailure" value="no" />
<property name="test.junit.maxmemory" value="512m" />
+ <property name="test.core.build.classes" value="${test.build.dir}/core/classes"/>
+ <property name="test.core.classpath.id" value="test.core.classpath"/>
+ <property name="test.hdfs.build.classes" value="${test.build.dir}/hdfs/classes"/>
+ <property name="test.hdfs.classpath.id" value="test.hdfs.classpath"/>
+ <property name="test.mapred.build.classes" value="${test.build.dir}/mapred/classes"/>
+ <property name="test.mapred.classpath.id" value="test.mapred.classpath"/>
+ <property name="test.hdfs.with.mr.build.classes" value="${test.build.dir}/hdfs-with-mr/classes"/>
+ <property name="test.hdfs.with.mr.classpath.id" value="test.hdfs.with.mr.classpath"/>
+
<property name="test.libhdfs.conf.dir" value="${c++.libhdfs.src}/tests/conf"/>
<property name="test.libhdfs.dir" value="${test.build.dir}/libhdfs"/>
@@ -215,10 +224,15 @@
<path refid="ivy-common.classpath"/>
</path>
- <!-- the unit test classpath: uses test.src.dir for configuration -->
+ <!-- the unit test classpath: uses test.src.dir for configuration
+ Keeping this target as many targets depend on it. -->
<path id="test.classpath">
+ <path refid="test.hdfs.with.mr.classpath"/>
+ </path>
+
+ <path id="test.core.classpath">
<pathelement location="${test.build.extraconf}"/>
- <pathelement location="${test.build.classes}" />
+ <pathelement location="${test.core.build.classes}" />
<pathelement location="${test.src.dir}"/>
<pathelement location="${build.dir}"/>
<pathelement location="${build.examples}"/>
@@ -231,6 +245,21 @@
<path refid="classpath"/>
</path>
+ <path id="test.hdfs.classpath">
+ <pathelement location="${test.hdfs.build.classes}" />
+ <path refid="test.core.classpath"/>
+ </path>
+
+ <path id="test.mapred.classpath">
+ <pathelement location="${test.mapred.build.classes}" />
+ <path refid="test.hdfs.classpath"/>
+ </path>
+
+ <path id="test.hdfs.with.mr.classpath">
+ <pathelement location="${test.hdfs.with.mr.build.classes}" />
+ <path refid="test.mapred.classpath"/>
+ </path>
+
<!-- the cluster test classpath: uses conf.dir for configuration -->
<path id="test.cluster.classpath">
<path refid="classpath"/>
@@ -275,8 +304,6 @@
<mkdir dir="${test.build.dir}"/>
<mkdir dir="${test.build.classes}"/>
- <mkdir dir="${test.build.testjar}"/>
- <mkdir dir="${test.build.testshell}"/>
<mkdir dir="${test.build.extraconf}"/>
<tempfile property="touch.temp.file" destDir="${java.io.tmpdir}"/>
<touch millis="0" file="${touch.temp.file}">
@@ -621,98 +648,182 @@
<!-- ================================================================== -->
<!-- Compile test code -->
<!-- ================================================================== -->
- <target name="compile-core-test" depends="compile-examples, compile-tools, generate-test-records">
+ <target name="compile-core-test" depends="compile-core-classes, generate-test-records">
+ <mkdir dir="${test.core.build.classes}"/>
<javac
encoding="${build.encoding}"
srcdir="${test.generated.dir}"
includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
+ destdir="${test.core.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line="${javac.args}" />
- <classpath refid="test.classpath"/>
+ <classpath refid="test.core.classpath"/>
</javac>
<javac
encoding="${build.encoding}"
- srcdir="${test.src.dir}"
+ srcdir="${test.src.dir}/core"
includes="org/apache/hadoop/**/*.java"
- destdir="${test.build.classes}"
+ destdir="${test.core.build.classes}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
+ <classpath refid="test.core.classpath"/>
+ </javac>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/core/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
+
+ </target>
+
+ <target name="compile-hdfs-test" depends="compile-hdfs-classes,compile-core-test">
+
+ <mkdir dir="${test.hdfs.build.classes}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/hdfs"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.hdfs.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.hdfs.classpath"/>
+ </javac>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/testHDFSConf.xml" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
+ </target>
+
+ <target name="compile-mapred-test" depends="compile-examples, compile-hdfs-test">
+
+ <mkdir dir="${test.mapred.build.classes}"/>
+ <mkdir dir="${test.mapred.build.testjar}"/>
+ <mkdir dir="${test.mapred.build.testshell}"/>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/mapred"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.mapred.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+
<javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/testjar"
- includes="*.java"
- destdir="${test.build.testjar}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/mapred/testjar"
+ includes="*.java"
+ destdir="${test.mapred.build.testjar}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
<compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testjar}/testjob.jar"/>
- <jar jarfile="${test.build.testjar}/testjob.jar"
- basedir="${test.build.testjar}">
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+
+ <delete file="${test.mapred.build.testjar}/testjob.jar"/>
+ <jar jarfile="${test.mapred.build.testjar}/testjob.jar"
+ basedir="${test.mapred.build.testjar}">
</jar>
+
<javac
encoding="${build.encoding}"
- srcdir="${test.src.dir}/testshell"
+ srcdir="${test.src.dir}/mapred/testshell"
includes="*.java"
- destdir="${test.build.testshell}"
+ destdir="${test.mapred.build.testshell}"
debug="${javac.debug}"
optimize="${javac.optimize}"
target="${javac.version}"
source="${javac.version}"
deprecation="${javac.deprecation}">
<compilerarg line="${javac.args} ${javac.args.warnings}"/>
- <classpath refid="test.classpath"/>
- </javac>
- <delete file="${test.build.testshell}/testshell.jar"/>
- <jar jarfile="${test.build.testshell}/testshell.jar"
- basedir="${test.build.testshell}">
- </jar>
-
- <delete dir="${test.cache.data}"/>
- <mkdir dir="${test.cache.data}"/>
- <delete dir="${test.debug.data}"/>
- <mkdir dir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-14-dfs-dir.tgz" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/hadoop-dfs-dir.txt" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/testConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/testHDFSConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/testMRConf.xml" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data15bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data30bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/cli/clitest_data/data120bytes" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV18" todir="${test.cache.data}"/>
- <copy file="${test.src.dir}/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+ <delete file="${test.mapred.build.testshell}/testshell.jar"/>
+ <jar jarfile="${test.mapred.build.testshell}/testshell.jar"
+ basedir="${test.mapred.build.testshell}">
+ </jar>
+
+ <delete dir="${test.cache.data}"/>
+ <mkdir dir="${test.cache.data}"/>
+ <delete dir="${test.debug.data}"/>
+ <mkdir dir="${test.debug.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/testscript.txt" todir="${test.debug.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.txt" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.jar" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.zip" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tgz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/mapred/test.tar.gz" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/testMRConf.xml" todir="${test.cache.data}"/>
+ <copy file="${test.src.dir}/hdfs/org/apache/hadoop/cli/clitest_data/data60bytes" todir="${test.cache.data}"/>
+ </target>
+
+ <target name="compile-hdfs-with-mr-test" depends="compile-mapred-test">
+
+ <mkdir dir="${test.hdfs.with.mr.build.classes}"/>
+
+ <javac
+ encoding="${build.encoding}"
+ srcdir="${test.src.dir}/hdfs-with-mr"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="${test.hdfs.with.mr.build.classes}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}" />
+ <classpath refid="test.mapred.classpath"/>
+ </javac>
+
</target>
+
<!-- ================================================================== -->
<!-- Make hadoop-test.jar -->
<!-- ================================================================== -->
<!-- -->
<!-- ================================================================== -->
- <target name="jar-test" depends="compile-core-test" description="Make hadoop-test.jar">
+ <target name="jar-test" depends="compile-core-test, compile-hdfs-test, compile-mapred-test, compile-hdfs-with-mr-test" description="Make hadoop-test.jar">
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.core.build.classes}"/>
+ </copy>
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.hdfs.build.classes}"/>
+ </copy>
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.mapred.build.classes}"/>
+ </copy>
+ <copy todir="${test.build.classes}">
+ <fileset dir="${test.hdfs.with.mr.build.classes}"/>
+ </copy>
<jar jarfile="${build.dir}/${final.name}-test.jar"
basedir="${test.build.classes}">
<manifest>
@@ -730,7 +841,7 @@
<!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
- <target name="test-core" depends="jar-test" description="Run core unit tests">
+ <target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
<delete dir="${test.build.data}"/>
<mkdir dir="${test.build.data}"/>
@@ -760,29 +871,182 @@
<syspropertyset dynamic="no">
<propertyref name="compile.c++"/>
</syspropertyset>
- <classpath refid="${test.classpath.id}"/>
+ <classpath refid="${test.core.classpath.id}"/>
<formatter type="${test.junit.output.format}" />
<batchtest todir="${test.build.dir}" unless="testcase">
- <fileset dir="${test.src.dir}"
+ <fileset dir="${test.src.dir}/core"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java" />
</batchtest>
<batchtest todir="${test.build.dir}" if="testcase">
- <fileset dir="${test.src.dir}" includes="**/${testcase}.java"/>
+ <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
</batchtest>
</junit>
- <fail if="tests.failed">Tests failed!</fail>
+ <fail if="tests.failed" unless="continueOnFailure">Tests failed!</fail>
</target>
- <target name="test-contrib" depends="compile, compile-core-test" description="Run contrib unit tests">
+ <target name="run-test-hdfs" depends="compile-hdfs-test" description="Run hdfs unit tests">
+ <delete dir="${test.build.data}"/>
+ <mkdir dir="${test.build.data}"/>
+ <delete dir="${test.log.dir}"/>
+ <mkdir dir="${test.log.dir}"/>
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="${test.build.extraconf}" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}" timeout="${test.timeout}"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.cache.data" value="${test.cache.data}"/>
+ <sysproperty key="test.debug.data" value="${test.debug.data}"/>
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+ <sysproperty key="test.src.dir" value="${test.src.dir}"/>
+ <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="${test.hdfs.classpath.id}"/>
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="${test.build.dir}" unless="testcase">
+ <fileset dir="${test.src.dir}/hdfs"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="testcase">
+ <fileset dir="${test.src.dir}/hdfs" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <fail if="tests.failed" unless="continueOnFailure">Tests failed!</fail>
+ </target>
+
+ <target name="run-test-mapred" depends="compile-mapred-test" description="Run mapred unit tests">
+
+ <delete dir="${test.build.data}"/>
+ <mkdir dir="${test.build.data}"/>
+ <delete dir="${test.log.dir}"/>
+ <mkdir dir="${test.log.dir}"/>
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="${test.build.extraconf}" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}" timeout="${test.timeout}"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.cache.data" value="${test.cache.data}"/>
+ <sysproperty key="test.debug.data" value="${test.debug.data}"/>
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+ <sysproperty key="test.src.dir" value="${test.src.dir}"/>
+ <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="${test.mapred.classpath.id}"/>
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="${test.build.dir}" unless="testcase">
+ <fileset dir="${test.src.dir}/mapred"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="testcase">
+ <fileset dir="${test.src.dir}/mapred" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <fail if="tests.failed" unless="continueOnFailure">Tests failed!</fail>
+ </target>
+
+ <target name="run-test-hdfs-with-mr" depends="compile-hdfs-with-mr-test" description="Run hdfs unit tests that require mapred">
+
+ <delete dir="${test.build.data}"/>
+ <mkdir dir="${test.build.data}"/>
+ <delete dir="${test.log.dir}"/>
+ <mkdir dir="${test.log.dir}"/>
+ <copy file="${test.src.dir}/hadoop-policy.xml"
+ todir="${test.build.extraconf}" />
+ <junit showoutput="${test.output}"
+ printsummary="${test.junit.printsummary}"
+ haltonfailure="${test.junit.haltonfailure}"
+ fork="yes"
+ forkmode="${test.junit.fork.mode}"
+ maxmemory="${test.junit.maxmemory}"
+ dir="${basedir}" timeout="${test.timeout}"
+ errorProperty="tests.failed" failureProperty="tests.failed">
+ <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.cache.data" value="${test.cache.data}"/>
+ <sysproperty key="test.debug.data" value="${test.debug.data}"/>
+ <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
+ <sysproperty key="test.src.dir" value="${test.src.dir}"/>
+ <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
+ <sysproperty key="java.library.path"
+ value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
+ <sysproperty key="install.c++.examples" value="${install.c++.examples}"/>
+ <!-- set compile.c++ in the child jvm only if it is set -->
+ <syspropertyset dynamic="no">
+ <propertyref name="compile.c++"/>
+ </syspropertyset>
+ <classpath refid="${test.hdfs.with.mr.classpath.id}"/>
+ <formatter type="${test.junit.output.format}" />
+ <batchtest todir="${test.build.dir}" unless="testcase">
+ <fileset dir="${test.src.dir}/hdfs-with-mr"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="testcase">
+ <fileset dir="${test.src.dir}/hdfs-with-mr" includes="**/${testcase}.java"/>
+ </batchtest>
+ </junit>
+ <fail if="tests.failed" unless="continueOnFailure">Tests failed!</fail>
+ </target>
+
+ <target name="test-contrib" depends="compile, compile-hdfs-with-mr-test" description="Run contrib unit tests">
<subant target="test">
<property name="version" value="${version}"/>
<property name="clover.jar" value="${clover.jar}"/>
<fileset file="${contrib.dir}/build.xml"/>
</subant>
</target>
-
- <target name="test" depends="test-c++-libhdfs, test-core, test-contrib" description="Run core, contrib unit tests">
+
+ <target name="test-core" description="Run core, hdfs and mapred unit tests">
+ <subant target="run-test-core">
+ <property name="continueOnFailure" value="true"/>
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
+ <subant target="run-test-hdfs">
+ <property name="continueOnFailure" value="true"/>
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
+ <subant target="run-test-mapred">
+ <property name="continueOnFailure" value="true"/>
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
+ <subant target="run-test-hdfs-with-mr">
+ <property name="continueOnFailure" value="true"/>
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
+ </target>
+
+ <target name="test" depends="test-c++-libhdfs, jar-test, test-core" description="Run all unit tests">
+ <subant target="test-contrib">
+ <fileset file="${basedir}/build.xml"/>
+ </subant>
</target>
<!-- Run all unit tests, not just Test*, and use non-test configuration. -->
Modified: hadoop/core/trunk/src/contrib/build-contrib.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/build-contrib.xml?rev=772956&r1=772955&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/build-contrib.xml (original)
+++ hadoop/core/trunk/src/contrib/build-contrib.xml Fri May 8 12:26:11 2009
@@ -93,6 +93,9 @@
<path id="test.classpath">
<pathelement location="${build.test}" />
<pathelement location="${hadoop.root}/build/test/classes"/>
+ <pathelement location="${hadoop.root}/build/test/core/classes"/>
+ <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
+ <pathelement location="${hadoop.root}/build/test/mapred/classes"/>
<pathelement location="${hadoop.root}/src/contrib/test"/>
<pathelement location="${conf.dir}"/>
<pathelement location="${hadoop.root}/build"/>
Modified: hadoop/core/trunk/src/contrib/hdfsproxy/build.xml
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/hdfsproxy/build.xml?rev=772956&r1=772955&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/hdfsproxy/build.xml (original)
+++ hadoop/core/trunk/src/contrib/hdfsproxy/build.xml Fri May 8 12:26:11 2009
@@ -440,6 +440,9 @@
<pathelement location="${proxy.conf.test}" />
<pathelement location="${test.build.dir}" />
<pathelement location="${hadoop.root}/build/test/classes"/>
+ <pathelement location="${hadoop.root}/build/test/core/classes"/>
+ <pathelement location="${hadoop.root}/build/test/hdfs/classes"/>
+ <pathelement location="${hadoop.root}/build/test/mapred/classes"/>
<!--<pathelement location="${hadoop.root}/src/contrib/test"/>-->
<pathelement location="${hadoop.root}/conf"/>
<pathelement location="${hadoop.root}/build"/>
Copied: hadoop/core/trunk/src/test/core/org/apache/hadoop/cli/TestCLI.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestCLI.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/core/org/apache/hadoop/cli/TestCLI.java?p2=hadoop/core/trunk/src/test/core/org/apache/hadoop/cli/TestCLI.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestCLI.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestCLI.java (original)
+++ hadoop/core/trunk/src/test/core/org/apache/hadoop/cli/TestCLI.java Fri May 8 12:26:11 2009
@@ -36,8 +36,6 @@
import org.apache.hadoop.cli.util.CLITestData.TestCmd.CommandType;
import org.apache.hadoop.cli.util.CommandExecutor.Result;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.security.authorize.HadoopPolicyProvider;
-import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
import org.apache.hadoop.util.StringUtils;
import org.xml.sax.Attributes;
@@ -107,8 +105,6 @@
readTestConfigFile();
conf = new Configuration();
- conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
- HadoopPolicyProvider.class, PolicyProvider.class);
conf.setBoolean(ServiceAuthorizationManager.SERVICE_AUTHORIZATION_CONFIG,
true);
Copied: hadoop/core/trunk/src/test/core/org/apache/hadoop/test/CoreTestDriver.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/test/CoreTestDriver.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/core/org/apache/hadoop/test/CoreTestDriver.java?p2=hadoop/core/trunk/src/test/core/org/apache/hadoop/test/CoreTestDriver.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/test/CoreTestDriver.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/test/CoreTestDriver.java (original)
+++ hadoop/core/trunk/src/test/core/org/apache/hadoop/test/CoreTestDriver.java Fri May 8 12:26:11 2009
@@ -19,7 +19,6 @@
package org.apache.hadoop.test;
import org.apache.hadoop.io.TestArrayFile;
-import org.apache.hadoop.io.TestSequenceFile;
import org.apache.hadoop.io.TestSetFile;
import org.apache.hadoop.ipc.TestIPC;
import org.apache.hadoop.ipc.TestRPC;
@@ -39,8 +38,6 @@
public CoreTestDriver(ProgramDriver pgd) {
this.pgd = pgd;
try {
- pgd.addClass("testsequencefile", TestSequenceFile.class,
- "A test for flat files of binary key value pairs.");
pgd.addClass("testsetfile", TestSetFile.class,
"A test for flat files of binary key/value pairs.");
pgd.addClass("testarrayfile", TestArrayFile.class,
Copied: hadoop/core/trunk/src/test/core/org/apache/hadoop/util/TestProcfsBasedProcessTree.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/util/TestProcfsBasedProcessTree.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/core/org/apache/hadoop/util/TestProcfsBasedProcessTree.java?p2=hadoop/core/trunk/src/test/core/org/apache/hadoop/util/TestProcfsBasedProcessTree.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/util/TestProcfsBasedProcessTree.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/util/TestProcfsBasedProcessTree.java (original)
+++ hadoop/core/trunk/src/test/core/org/apache/hadoop/util/TestProcfsBasedProcessTree.java Fri May 8 12:26:11 2009
@@ -18,7 +18,10 @@
package org.apache.hadoop.util;
+import java.io.BufferedReader;
import java.io.File;
+import java.io.FileNotFoundException;
+import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.util.Random;
@@ -29,7 +32,6 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.util.Shell.ExitCodeException;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
-import org.apache.hadoop.mapred.UtilsForTests;
import junit.framework.TestCase;
@@ -84,7 +86,7 @@
}
// read from pidFile
- return UtilsForTests.getPidFromPidFile(pidFile);
+ return getPidFromPidFile(pidFile);
}
public void testProcessTree() {
@@ -184,4 +186,49 @@
.getCumulativeVmem() == 0);
assertTrue(p.toString().equals("[ ]"));
}
+
+ /**
+ * Get PID from a pid-file.
+ *
+ * @param pidFileName
+ * Name of the pid-file.
+ * @return the PID string read from the pid-file. Returns null if the
+ * pidFileName points to a non-existing file or if read fails from the
+ * file.
+ */
+ public static String getPidFromPidFile(String pidFileName) {
+ BufferedReader pidFile = null;
+ FileReader fReader = null;
+ String pid = null;
+
+ try {
+ fReader = new FileReader(pidFileName);
+ pidFile = new BufferedReader(fReader);
+ } catch (FileNotFoundException f) {
+ LOG.debug("PidFile doesn't exist : " + pidFileName);
+ return pid;
+ }
+
+ try {
+ pid = pidFile.readLine();
+ } catch (IOException i) {
+ LOG.error("Failed to read from " + pidFileName);
+ } finally {
+ try {
+ if (fReader != null) {
+ fReader.close();
+ }
+ try {
+ if (pidFile != null) {
+ pidFile.close();
+ }
+ } catch (IOException i) {
+ LOG.warn("Error closing the stream " + pidFile);
+ }
+ } catch (IOException i) {
+ LOG.warn("Error closing the stream " + fReader);
+ }
+ }
+ return pid;
+ }
}
Copied: hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestHDFSCLI.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java?p2=hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestHDFSCLI.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestHDFSCLI.java (original)
+++ hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/cli/TestHDFSCLI.java Fri May 8 12:26:11 2009
@@ -24,8 +24,10 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FsShell;
import org.apache.hadoop.hdfs.DistributedFileSystem;
+import org.apache.hadoop.hdfs.HDFSPolicyProvider;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.tools.DFSAdmin;
+import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.util.ToolRunner;
public class TestHDFSCLI extends TestCLI{
@@ -38,6 +40,9 @@
public void setUp() throws Exception {
super.setUp();
+ conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
+ HDFSPolicyProvider.class, PolicyProvider.class);
+
// Many of the tests expect a replication value of 1 in the output
conf.setInt("dfs.replication", 1);
Copied: hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java?p2=hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java (original)
+++ hadoop/core/trunk/src/test/hdfs/org/apache/hadoop/fs/ftp/TestFTPFileSystem.java Fri May 8 12:26:11 2009
@@ -32,7 +32,6 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.mapred.JobConf;
/**
* Generates a bunch of random files and directories using class 'DFSTestUtil',
@@ -41,7 +40,7 @@
*/
public class TestFTPFileSystem extends TestCase {
- private Configuration defaultConf = new JobConf();
+ private Configuration defaultConf = new Configuration();
private FtpServer server = null;
private FileSystem localFs = null;
private FileSystem ftpFs = null;
Copied: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/cli/TestMRCLI.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestMRCLI.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/cli/TestMRCLI.java?p2=hadoop/core/trunk/src/test/mapred/org/apache/hadoop/cli/TestMRCLI.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestMRCLI.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/cli/TestMRCLI.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/cli/TestMRCLI.java Fri May 8 12:26:11 2009
@@ -25,6 +25,8 @@
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.tools.MRAdmin;
+import org.apache.hadoop.security.authorize.HadoopPolicyProvider;
+import org.apache.hadoop.security.authorize.PolicyProvider;
import org.apache.hadoop.util.ToolRunner;
public class TestMRCLI extends TestHDFSCLI{
@@ -36,6 +38,8 @@
public void setUp() throws Exception {
super.setUp();
+ conf.setClass(PolicyProvider.POLICY_PROVIDER_CONFIG,
+ HadoopPolicyProvider.class, PolicyProvider.class);
JobConf mrConf = new JobConf(conf);
mrCluster = new MiniMRCluster(1, dfsCluster.getFileSystem().getUri().toString(), 1,
null, null, mrConf);
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCommandLineJobSubmission.java Fri May 8 12:26:11 2009
@@ -65,7 +65,7 @@
args[2] = "-libjars";
// the testjob.jar as a temporary jar file
// rather than creating its own
- args[3] = "build/test/testjar/testjob.jar";
+ args[3] = "build/test/mapred/testjar/testjob.jar";
args[4] = input.toString();
args[5] = output.toString();
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCustomOutputCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCustomOutputCommitter.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCustomOutputCommitter.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestCustomOutputCommitter.java Fri May 8 12:26:11 2009
@@ -48,7 +48,7 @@
args[0] = "-libjars";
// the testjob.jar as a temporary jar file
// holding custom output committer
- args[1] = "build/test/testjar/testjob.jar";
+ args[1] = "build/test/mapred/testjar/testjob.jar";
args[2] = "-D";
args[3] = "mapred.output.committer.class=testjar.CustomOutputCommitter";
args[4] = input.toString();
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java Fri May 8 12:26:11 2009
@@ -34,6 +34,7 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ProcessTree;
import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.TestProcfsBasedProcessTree;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -189,7 +190,7 @@
// Checking if the descendant processes of map task are alive
if(ProcessTree.isSetsidAvailable) {
- String childPid = UtilsForTests.getPidFromPidFile(
+ String childPid = TestProcfsBasedProcessTree.getPidFromPidFile(
scriptDirName + "/childPidFile" + 0);
while(childPid == null) {
LOG.warn(scriptDirName + "/childPidFile" + 0 + " is null; Sleeping...");
@@ -199,7 +200,7 @@
LOG.warn("sleep is interrupted:" + ie);
break;
}
- childPid = UtilsForTests.getPidFromPidFile(
+ childPid = TestProcfsBasedProcessTree.getPidFromPidFile(
scriptDirName + "/childPidFile" + 0);
}
@@ -208,7 +209,7 @@
// have been created already(See the script for details).
// Now check if the descendants of map task are alive.
for(int i=0; i <= numLevelsOfSubProcesses; i++) {
- childPid = UtilsForTests.getPidFromPidFile(
+ childPid = TestProcfsBasedProcessTree.getPidFromPidFile(
scriptDirName + "/childPidFile" + i);
LOG.info("pid of the descendant process at level " + i +
"in the subtree of processes(with the map task as the root)" +
@@ -242,7 +243,7 @@
// Checking if the descendant processes of map task are killed properly
if(ProcessTree.isSetsidAvailable) {
for(int i=0; i <= numLevelsOfSubProcesses; i++) {
- String childPid = UtilsForTests.getPidFromPidFile(
+ String childPid = TestProcfsBasedProcessTree.getPidFromPidFile(
scriptDirName + "/childPidFile" + i);
LOG.info("pid of the descendant process at level " + i +
"in the subtree of processes(with the map task as the root)" +
@@ -343,7 +344,7 @@
Runtime.getRuntime()
.exec(shellScript + " " + numLevelsOfSubProcesses);
- String childPid = UtilsForTests.getPidFromPidFile(scriptDir
+ String childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDir
+ "/childPidFile" + 0);
while (childPid == null) {
LOG.warn(scriptDir + "/childPidFile" + 0 + " is null; Sleeping...");
@@ -353,7 +354,7 @@
LOG.warn("sleep is interrupted:" + ie);
break;
}
- childPid = UtilsForTests.getPidFromPidFile(scriptDir
+ childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDir
+ "/childPidFile" + 0);
}
}
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRClasspath.java Fri May 8 12:26:11 2009
@@ -73,7 +73,7 @@
conf.setNumMapTasks(numMaps);
conf.setNumReduceTasks(numReduces);
//pass a job.jar already included in the hadoop build
- conf.setJar("build/test/testjar/testjob.jar");
+ conf.setJar("build/test/mapred/testjar/testjob.jar");
JobClient.runJob(conf);
StringBuffer result = new StringBuffer();
{
@@ -130,7 +130,7 @@
conf.set("mapred.reducer.class", "testjar.ExternalMapperReducer");
//pass a job.jar already included in the hadoop build
- conf.setJar("build/test/testjar/testjob.jar");
+ conf.setJar("build/test/mapred/testjar/testjob.jar");
JobClient.runJob(conf);
StringBuffer result = new StringBuffer();
Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/TestRackAwareTaskPlacement.java Fri May 8 12:26:11 2009
@@ -175,7 +175,7 @@
jobConf.setOutputValueClass(BytesWritable.class);
jobConf.setNumMapTasks(numMaps);
jobConf.setNumReduceTasks(0);
- jobConf.setJar("build/test/testjar/testjob.jar");
+ jobConf.setJar("build/test/mapred/testjar/testjob.jar");
return JobClient.runJob(jobConf);
}
}
Modified: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=772956&r1=772918&r2=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Fri May 8 12:26:11 2009
@@ -295,7 +295,7 @@
jobConf.setInputFormat(RandomInputFormat.class);
jobConf.setNumMapTasks(numMaps);
jobConf.setNumReduceTasks(numRed);
- jobConf.setJar("build/test/testjar/testjob.jar");
+ jobConf.setJar("build/test/mapred/testjar/testjob.jar");
jobConf.set(getTaskSignalParameter(true), mapSignalFilename);
jobConf.set(getTaskSignalParameter(false), redSignalFilename);
}
@@ -686,48 +686,4 @@
fos.close();
}
- /**
- * Get PID from a pid-file.
- *
- * @param pidFileName
- * Name of the pid-file.
- * @return the PID string read from the pid-file. Returns null if the
- * pidFileName points to a non-existing file or if read fails from the
- * file.
- */
- public static String getPidFromPidFile(String pidFileName) {
- BufferedReader pidFile = null;
- FileReader fReader = null;
- String pid = null;
-
- try {
- fReader = new FileReader(pidFileName);
- pidFile = new BufferedReader(fReader);
- } catch (FileNotFoundException f) {
- LOG.debug("PidFile doesn't exist : " + pidFileName);
- return pid;
- }
-
- try {
- pid = pidFile.readLine();
- } catch (IOException i) {
- LOG.error("Failed to read from " + pidFileName);
- } finally {
- try {
- if (fReader != null) {
- fReader.close();
- }
- try {
- if (pidFile != null) {
- pidFile.close();
- }
- } catch (IOException i) {
- LOG.warn("Error closing the stream " + pidFile);
- }
- } catch (IOException i) {
- LOG.warn("Error closing the stream " + fReader);
- }
- }
- return pid;
- }
}
Copied: hadoop/core/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java (from r772918, hadoop/core/trunk/src/test/org/apache/hadoop/test/MapredTestDriver.java)
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java?p2=hadoop/core/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java&p1=hadoop/core/trunk/src/test/org/apache/hadoop/test/MapredTestDriver.java&r1=772918&r2=772956&rev=772956&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/test/MapredTestDriver.java (original)
+++ hadoop/core/trunk/src/test/mapred/org/apache/hadoop/test/MapredTestDriver.java Fri May 8 12:26:11 2009
@@ -18,6 +18,7 @@
package org.apache.hadoop.test;
+import org.apache.hadoop.io.TestSequenceFile;
import org.apache.hadoop.mapred.BigMapOutput;
import org.apache.hadoop.mapred.GenericMRLoadGenerator;
import org.apache.hadoop.mapred.MRBench;
@@ -44,6 +45,8 @@
public MapredTestDriver(ProgramDriver pgd) {
this.pgd = pgd;
try {
+ pgd.addClass("testsequencefile", TestSequenceFile.class,
+ "A test for flat files of binary key value pairs.");
pgd.addClass("threadedmapbench", ThreadedMapBenchmark.class,
"A map/reduce benchmark that compares the performance " +
"of maps with multiple spills over maps with 1 spill");