Posted to mapreduce-commits@hadoop.apache.org by co...@apache.org on 2010/06/21 21:02:51 UTC
svn commit: r956666 [1/4] - in /hadoop/mapreduce/trunk: ./ ivy/
src/test/aop/build/ src/test/mapred/org/apache/hadoop/mapred/
src/test/mapred/testjar/ src/test/system/ src/test/system/aop/
src/test/system/aop/org/ src/test/system/aop/org/apache/ src/te...
Author: cos
Date: Mon Jun 21 19:02:49 2010
New Revision: 956666
URL: http://svn.apache.org/viewvc?rev=956666&view=rev
Log:
MAPREDUCE-1774. Large-scale Automated Framework. Contributed by Sharad Agarwal, Sreekanth Ramakrishnan, Konstantin Boudnik, et al.
Added:
hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-template.xml
hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-test-template.xml
hadoop/mapreduce/trunk/src/test/mapred/testjar/JobKillCommitter.java
hadoop/mapreduce/trunk/src/test/mapred/testjar/UserNamePermission.java
hadoop/mapreduce/trunk/src/test/system/
hadoop/mapreduce/trunk/src/test/system/aop/
hadoop/mapreduce/trunk/src/test/system/aop/org/
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/MapReducePolicyProviderAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapreduce/
hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapreduce/ClusterAspect.aj
hadoop/mapreduce/trunk/src/test/system/conf/
hadoop/mapreduce/trunk/src/test/system/conf/system-test-mapred.xml
hadoop/mapreduce/trunk/src/test/system/java/
hadoop/mapreduce/trunk/src/test/system/java/org/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapred/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapred/JobInfoImpl.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapred/TTInfoImpl.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapred/TTTaskInfoImpl.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapred/TaskInfoImpl.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/FinishTaskControlAction.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JTClient.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JTProtocol.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/JobInfo.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/MRCluster.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/MRDaemonClient.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTClient.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTInfo.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTProtocol.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TTTaskInfo.java
hadoop/mapreduce/trunk/src/test/system/java/org/apache/hadoop/mapreduce/test/system/TaskInfo.java
hadoop/mapreduce/trunk/src/test/system/test/
hadoop/mapreduce/trunk/src/test/system/test/org/
hadoop/mapreduce/trunk/src/test/system/test/org/apache/
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestCluster.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestControlledJob.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestDistributedCacheModifiedFile.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestDistributedCachePrivateFile.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestDistributedCacheUnModifiedFile.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestFileOwner.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestJobKill.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestPushConfig.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestSortValidate.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestTaskKilling.java
hadoop/mapreduce/trunk/src/test/system/test/org/apache/hadoop/mapred/TestTaskOwner.java
Modified:
hadoop/mapreduce/trunk/CHANGES.txt
hadoop/mapreduce/trunk/build.xml
hadoop/mapreduce/trunk/ivy.xml
hadoop/mapreduce/trunk/ivy/libraries.properties
hadoop/mapreduce/trunk/src/test/aop/build/aop.xml
hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Mon Jun 21 19:02:49 2010
@@ -6,6 +6,9 @@ Trunk (unreleased changes)
NEW FEATURES
+ MAPREDUCE-1774. Large-scale Automated Framework (Sharad Agarwal, Sreekanth
+ Ramakrishnan, Konstantin Boudnik, et al. via cos)
+
MAPREDUCE-1804. Stress-test tool for HDFS introduced in HDFS-708.
(Joshua Harlow via shv)
Modified: hadoop/mapreduce/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/build.xml?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/build.xml (original)
+++ hadoop/mapreduce/trunk/build.xml Mon Jun 21 19:02:49 2010
@@ -30,6 +30,7 @@
<property name="Name" value="Hadoop-Mapred"/>
<property name="name" value="hadoop-mapred"/>
+ <!-- Keep the project.version property in aop.xml in sync with this value -->
<property name="version" value="0.22.0-SNAPSHOT"/>
<property name="final.name" value="${name}-${version}"/>
<property name="test.final.name" value="${name}-test-${version}"/>
@@ -209,7 +210,7 @@
<property name="hadoop-mapred-test.jar" location="${build.dir}/${test.final.name}.jar" />
<property name="hadoop-mapred-examples.jar" location="${build.dir}/${examples.final.name}.jar" />
<property name="hadoop-mapred-tools.jar" location="${build.dir}/${tools.final.name}.jar" />
-
+ <property name="hadoop-mapred-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="hadoop-mapred-sources.jar" location="${build.dir}/${final.name}-sources.jar" />
<property name="hadoop-mapred-test-sources.jar" location="${build.dir}/${test.final.name}-sources.jar" />
<property name="hadoop-mapred-examples-sources.jar" location="${build.dir}/${examples.final.name}-sources.jar" />
@@ -240,7 +241,7 @@
<path id="test.classpath">
<pathelement location="${test.build.extraconf}"/>
- <pathelement location="${test.core.build.classes}" />
+ <pathelement location="${test.mapred.build.classes}" />
<pathelement location="${test.src.dir}"/>
<pathelement location="${build.dir}"/>
<pathelement location="${build.examples}"/>
@@ -510,19 +511,10 @@
<mkdir dir="${test.mapred.build.testjar}"/>
<mkdir dir="${test.mapred.build.testshell}"/>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/mapred;${test.src.dir}/unit"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.mapred.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
+ <macro-compile-test
+ source.dir="${test.src.dir}/mapred;${test.src.dir}/unit"
+ dest.dir="${test.mapred.build.classes}"
+ classpath="test.classpath"/>
<javac
encoding="${build.encoding}"
@@ -576,6 +568,28 @@
<copy file="${test.src.dir}/mapred/org/apache/hadoop/cli/data60bytes" todir="${test.cache.data}"/>
</target>
+ <macrodef name="macro-compile-test">
+ <attribute name="source.dir"/>
+ <attribute name="dest.dir"/>
+ <attribute name="classpath"/>
+ <sequential>
+ <mkdir dir="@{dest.dir}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="@{source.dir}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="@{dest.dir}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}"/>
+ <classpath refid="@{classpath}"/>
+ </javac>
+ </sequential>
+ </macrodef>
+
<!-- ================================================================== -->
<!-- Make hadoop-mapred-test.jar -->
<!-- ================================================================== -->
@@ -585,6 +599,16 @@
<copy todir="${test.build.classes}">
<fileset dir="${test.mapred.build.classes}"/>
</copy>
+ <subant buildpath="build.xml" target="-do-jar-test"/>
+ <jar jarfile="${hadoop-mapred-test-sources.jar}">
+ <fileset dir="${test.src.dir}/mapred" includes="org/apache/hadoop/**/*.java" />
+ <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
+ <fileset dir="${test.src.dir}/mapred/testjar" includes="*.java" />
+ <fileset dir="${test.src.dir}/mapred/testshell" includes="*.java" />
+ </jar>
+ </target>
+
+ <target name="-do-jar-test">
<jar jarfile="${build.dir}/${test.final.name}.jar"
basedir="${test.build.classes}">
<manifest>
@@ -597,27 +621,24 @@
</section>
</manifest>
</jar>
-
- <jar jarfile="${hadoop-mapred-test-sources.jar}">
- <fileset dir="${test.src.dir}/mapred" includes="org/apache/hadoop/**/*.java" />
- <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
- <fileset dir="${test.src.dir}/mapred/testjar" includes="*.java" />
- <fileset dir="${test.src.dir}/mapred/testshell" includes="*.java" />
- </jar>
</target>
<macrodef name="macro-test-runner">
<attribute name="test.file" />
<attribute name="suite.type" />
+ <attribute name="classpath" />
+ <attribute name="test.dir" />
+ <attribute name="fileset.dir" />
+ <attribute name="hadoop.conf.dir.deployed" default="" />
<sequential>
- <delete dir="${test.build.data}"/>
- <mkdir dir="${test.build.data}"/>
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
+ <delete dir="@{test.dir}/data"/>
+ <mkdir dir="@{test.dir}/data"/>
+ <delete dir="@{test.dir}/logs"/>
+ <mkdir dir="@{test.dir}/logs"/>
<copy file="${test.src.dir}/hadoop-policy.xml"
- todir="${test.build.extraconf}" />
+ todir="@{test.dir}/extraconf" />
<copy file="${test.src.dir}/fi-site.xml"
- todir="${test.build.extraconf}" />
+ todir="@{test.dir}/extraconf" />
<junit showoutput="${test.output}"
printsummary="${test.junit.printsummary}"
haltonfailure="${test.junit.haltonfailure}"
@@ -627,15 +648,15 @@
dir="${basedir}" timeout="${test.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed">
<jvmarg value="${test.junit.jvmargs}" />
- <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.build.data" value="@{test.dir}/data"/>
<sysproperty key="test.tools.input.dir" value = "${test.tools.input.dir}"/>
<sysproperty key="test.cache.data" value="${test.cache.data}"/>
<sysproperty key="test.debug.data" value="${test.debug.data}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}"/>
- <sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
+ <sysproperty key="hadoop.log.dir" value="@{test.dir}/logs"/>
+ <sysproperty key="test.src.dir" value="@{fileset.dir}"/>
+ <sysproperty key="taskcontroller-path" value="${taskcontroller-path}"/>
<sysproperty key="taskcontroller-ugi" value="${taskcontroller-ugi}"/>
- <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf" />
<sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
<sysproperty key="java.library.path"
value="${build.native}/lib:${lib.dir}/native/${build.platform}"/>
@@ -652,36 +673,39 @@
<syspropertyset id="FaultProbabilityProperties">
<propertyref regex="fi.*"/>
</syspropertyset>
-
- <classpath refid="test.classpath"/>
+ <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
+ value="@{hadoop.conf.dir.deployed}" />
+ <classpath refid="@{classpath}"/>
<formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" unless="testcase">
- <fileset dir="${test.src.dir}/@{suite.type}" excludes="**/${test.exclude}.java">
+ <batchtest todir="@{test.dir}" unless="testcase">
+ <fileset dir="@{fileset.dir}/@{suite.type}"
+ excludes="**/${test.exclude}.java aop/** system/**">
<patternset>
<includesfile name="@{test.file}"/>
</patternset>
</fileset>
</batchtest>
- <batchtest todir="${test.build.dir}" if="testcase">
- <fileset dir="${test.src.dir}/mapred" includes="**/${testcase}.java"/>
- <fileset dir="${test.src.dir}/unit" includes="**/${testcase}.java"/>
+ <batchtest todir="@{test.dir}" if="testcase">
+ <fileset dir="@{fileset.dir}/mapred" includes="**/${testcase}.java"/>
+ <fileset dir="@{fileset.dir}/unit" includes="**/${testcase}.java"/>
+ <fileset dir="@{fileset.dir}/system/test" includes="**/${testcase}.java"/>
</batchtest>
<!--batch test to test all the testcases in aop folder with fault
injection-->
- <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
- <fileset dir="${test.src.dir}/aop"
+ <batchtest todir="@{test.dir}" if="tests.notestcase.fi">
+ <fileset dir="@{fileset.dir}/aop"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java" />
</batchtest>
<!-- batch test for testing a single test case in aop folder with
fault injection-->
- <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
- <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+ <batchtest todir="@{test.dir}" if="tests.testcase.fi">
+ <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java"/>
</batchtest>
<!--The following batch is for very special occasions only when
a non-FI tests are needed to be executed against FI-environment -->
- <batchtest todir="${test.build.dir}" if="tests.testcaseonly.fi">
- <fileset dir="${test.src.dir}/mapred"
+ <batchtest todir="@{test.dir}" if="tests.testcaseonly.fi">
+ <fileset dir="@{fileset.dir}/mapred"
includes="**/${testcase}.java"/>
</batchtest>
</junit>
@@ -690,15 +714,24 @@
</macrodef>
<target name="run-test-mapred" depends="compile-mapred-test" description="Run mapred functional and system tests">
- <macro-test-runner test.file="${test.mapred.all.tests.file}" suite.type="mapred"/>
+ <macro-test-runner test.file="${test.mapred.all.tests.file}" suite.type="mapred"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
<target name="run-commit-test" depends="compile-mapred-test" description="Run approximate 10-minute set of unit tests prior to commiting">
- <macro-test-runner test.file="${test.mapred.commit.tests.file}" suite.type="mapred"/>
+ <macro-test-runner test.file="${test.mapred.commit.tests.file}" suite.type="mapred"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
<target name="run-test-unit" depends="compile-mapred-test" description="Run unit tests">
- <macro-test-runner test.file="${test.mapred.all.tests.file}" suite.type="unit"/>
+ <macro-test-runner test.file="${test.mapred.all.tests.file}" suite.type="unit"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
@@ -1202,6 +1235,45 @@
</target>
+ <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
+ description="make system test package for deployment">
+ <!-- TODO: clean up this copying logic -->
+ <copy todir="${system-test-build-dir}/${final.name}">
+ <fileset dir="${dist.dir}">
+ </fileset>
+ </copy>
+ <copy todir="${system-test-build-dir}/${final.name}/conf">
+ <fileset dir="${test.src.dir}/system/conf/"/>
+ </copy>
+ <copy todir="${system-test-build-dir}">
+ <fileset dir="${build.dir}">
+ <include name="${test.final.name}.jar"/>
+ <include name="${examples.final.name}.jar"/>
+ </fileset>
+ </copy>
+ <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-common-${version}.jar"
+ file="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${version}.jar"
+ overwrite="true"/>
+ <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-hdfs-${version}.jar"
+ file="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-hdfs-${herriot.suffix}-${version}.jar"
+ overwrite="true"/>
+ <copy tofile="${system-test-build-dir}/${final.name}/${final.name}.jar"
+ file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
+ <copy tofile="${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
+ file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
+ <macro_tar
+ param.destfile="${system-test-build-dir}/${final.name}-bin.${herriot.suffix}.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${system-test-build-dir}" mode="664">
+ <exclude name="${final.name}/bin/*" />
+ <exclude name="${final.name}/src/**" />
+ <exclude name="${final.name}/docs/**" />
+ <include name="${final.name}/**" />
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
<target name="binary" depends="bin-package" description="Make tarball without source and documentation">
<macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
<param.listofitems>
@@ -1255,7 +1327,8 @@
</artifact:install>
</target>
- <target name="mvn-install" depends="mvn-taskdef,examples,tools,jar-test,set-version">
+ <target name="mvn-install" depends="mvn-taskdef,examples,tools,jar-test,set-version,
+ -mvn-system-install">
<artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
<artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
<artifact:pom file="${hadoop-mapred-examples.pom}" id="hadoop.mapred.examples"/>
@@ -1278,7 +1351,8 @@
</artifact:install>
</target>
- <target name="mvn-deploy" depends="mvn-taskdef, examples, tools, jar-test, set-version">
+ <target name="mvn-deploy" depends="mvn-taskdef, examples, tools, jar-test, set-version,
+ -mvn-system-deploy">
<artifact:pom file="${hadoop-mapred.pom}" id="hadoop.mapred"/>
<artifact:pom file="${hadoop-mapred-test.pom}" id="hadoop.mapred.test"/>
<artifact:pom file="${hadoop-mapred-examples.pom}" id="hadoop.mapred.examples"/>
@@ -1310,12 +1384,20 @@
<target name="set-version">
<delete file="${basedir}/ivy/hadoop-mapred.xml"/>
<delete file="${basedir}/ivy/hadoop-mapred-test.xml"/>
+ <delete file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}.xml"/>
+ <delete file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test.xml"/>
<delete file="${basedir}/ivy/hadoop-mapred-examples.xml"/>
<delete file="${basedir}/ivy/hadoop-mapred-tools.xml"/>
+ <delete file="${hadoop-mapred-instrumented.pom}"/>
<copy file="${basedir}/ivy/hadoop-mapred-template.xml" tofile="${basedir}/ivy/hadoop-mapred.xml"/>
<copy file="${basedir}/ivy/hadoop-mapred-test-template.xml" tofile="${basedir}/ivy/hadoop-mapred-test.xml"/>
+ <copy file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-template.xml"
+ tofile="${basedir}/ivy/hadoop-mapred-${herriot.suffix}.xml"/>
+ <copy file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test-template.xml"
+ tofile="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-test.xml"/>
<copy file="${basedir}/ivy/hadoop-mapred-examples-template.xml" tofile="${basedir}/ivy/hadoop-mapred-examples.xml"/>
<copy file="${basedir}/ivy/hadoop-mapred-tools-template.xml" tofile="${basedir}/ivy/hadoop-mapred-tools.xml"/>
+ <copy file="${basedir}/ivy/hadoop-mapred-${herriot.suffix}-template.xml" tofile="${hadoop-mapred-instrumented.pom}"/>
<replaceregexp byline="true">
<regexp pattern="@version"/>
<substitution expression="${version}"/>
@@ -1344,6 +1426,14 @@
<include name="hadoop-mapred-tools.xml"/>
</fileset>
</replaceregexp>
+ <replaceregexp byline="true">
+ <regexp pattern="@version"/>
+ <substitution expression="${version}"/>
+ <fileset dir="${basedir}/ivy">
+ <include name="hadoop-mapred-${herriot.suffix}.xml"/>
+ <include name="hadoop-mapred-${herriot.suffix}-test.xml"/>
+ </fileset>
+ </replaceregexp>
</target>
<!-- ================================================================== -->
@@ -1387,11 +1477,13 @@
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
- <target name="clean" depends="clean-contrib" description="Clean. Delete the build files, and their directories">
+ <target name="clean" depends="clean-contrib, clean-fi" description="Clean. Delete the build files, and their directories">
<delete dir="${build.dir}"/>
<delete dir="${docs.src}/build"/>
<delete file="${hadoop-mapred.pom}"/>
<delete file="${hadoop-mapred-test.pom}"/>
+ <delete file="${hadoop-mapred-instrumented.pom}"/>
+ <delete file="${hadoop-mapred-instrumented-test.pom}"/>
<delete file="${hadoop-mapred-examples.pom}"/>
<delete file="${hadoop-mapred-tools.pom}"/>
</target>
@@ -1735,6 +1827,11 @@
log="${ivyresolvelog}"/>
</target>
+ <target name="ivy-resolve-system" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="system"
+ log="${ivyresolvelog}"/>
+ </target>
+
<target name="ivy-retrieve" depends="ivy-resolve"
description="Retrieve Ivy-managed artifacts">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
@@ -1790,6 +1887,14 @@
<ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
</target>
+ <target name="ivy-retrieve-system" depends="ivy-resolve-system"
+ description="Retrieve Ivy-managed artifacts for the system tests">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+ log="${ivyresolvelog}"/>
+ <ivy:cachepath pathid="ivy-system.classpath" conf="system"/>
+ </target>
+
<target name="ivy-report" depends="ivy-resolve-releaseaudit"
description="Generate">
<ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
@@ -1885,6 +1990,7 @@
description="Make hadoop-fi.jar">
<macro-jar-fault-inject
target.name="jar"
+ build.dir="${build-fi.dir}"
jar.final.name="final.name"
jar.final.value="${final.name}-fi" />
</target>
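
The hadoop.conf.dir.deployed attribute added to macro-test-runner above is forwarded to the forked JUnit JVM as the test.system.hdrc.deployed.hadoopconfdir system property. A minimal sketch of how a system test might read it (the empty-string default mirrors the attribute's default):

    // Read the Hadoop conf dir of the deployed cluster, as passed in by
    // macro-test-runner; empty when no deployed configuration is used.
    String deployedConfDir =
        System.getProperty("test.system.hdrc.deployed.hadoopconfdir", "");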
Modified: hadoop/mapreduce/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy.xml?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy.xml (original)
+++ hadoop/mapreduce/trunk/ivy.xml Mon Jun 21 19:02:49 2010
@@ -39,6 +39,7 @@
<conf name="common" visibility="private" extends="compile,runtime" description="common artifacts"/>
<conf name="javadoc" visibility="private" description="artiracts required while performing doc generation" extends="common"/>
<conf name="test" extends="common" visibility="private" description="the classpath needed to run tests"/>
+ <conf name="system" extends="test" visibility="private" description="the classpath needed to run system tests"/>
<conf name="test-hdfswithmr" extends="test" visibility="private" description="the classpath needed to run tests"/>
@@ -60,6 +61,10 @@
rev="${hadoop-common.version}" conf="common->default"/>
<dependency org="org.apache.hadoop" name="hadoop-hdfs"
rev="${hadoop-hdfs.version}" conf="common->default"/>
+ <dependency org="org.apache.hadoop" name="hadoop-common-instrumented"
+ rev="${hadoop-common.version}" conf="system->default"/>
+ <dependency org="org.apache.hadoop" name="hadoop-hdfs-instrumented"
+ rev="${hadoop-common.version}" conf="system->default"/>
<dependency org="commons-logging" name="commons-logging"
rev="${commons-logging.version}" conf="common->master"/>
<dependency org="log4j" name="log4j" rev="${log4j.version}"
Added: hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-template.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-template.xml?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-template.xml (added)
+++ hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-template.xml Mon Jun 21 19:02:49 2010
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapred-instrumented</artifactId>
+ <packaging>jar</packaging>
+ <version>@version</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common</artifactId>
+ <version>0.22.0-dev-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+</project>
Added: hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-test-template.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-test-template.xml?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-test-template.xml (added)
+++ hadoop/mapreduce/trunk/ivy/hadoop-mapred-instrumented-test-template.xml Mon Jun 21 19:02:49 2010
@@ -0,0 +1,34 @@
+<?xml version="1.0" encoding="UTF-8"?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapred-test-instrumented</artifactId>
+ <packaging>jar</packaging>
+ <version>@version</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-mapred</artifactId>
+ <version>@version</version>
+ </dependency>
+ </dependencies>
+</project>
Modified: hadoop/mapreduce/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy/libraries.properties?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy/libraries.properties (original)
+++ hadoop/mapreduce/trunk/ivy/libraries.properties Mon Jun 21 19:02:49 2010
@@ -18,6 +18,7 @@ apacheant.version=1.7.1
ant-task.version=2.0.10
#Aspectj dependency for Fault injection
+#This property has to be updated synchronously with aop.xml
aspectj.version=1.6.5
avro.version=1.3.0
Modified: hadoop/mapreduce/trunk/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/aop/build/aop.xml?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/aop/build/aop.xml (original)
+++ hadoop/mapreduce/trunk/src/test/aop/build/aop.xml Mon Jun 21 19:02:49 2010
@@ -14,13 +14,42 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<project name="aspects">
+<project name="aspects"
+ xmlns:artifact="urn:maven-artifact-ant">
+ <!-- The following are duplications and have to be customized elsewhere too -->
+ <!-- TODO this version has to be updated synchronously with Ivy -->
+ <property name="aspectversion" value="1.6.5"/>
+ <!-- TODO this has to be changed synchronously with build.xml version prop.-->
+ <!-- this works around test-patch setting its own 'version' -->
+ <property name="project.version" value="0.22.0-SNAPSHOT"/>
+
+ <!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
- <property name="aspectversion" value="1.6.5"/>
<property file="${basedir}/build.properties"/>
+ <!-- Properties related to system fault injection and tests -->
+ <property name="herriot.suffix" value="instrumented"/>
+ <property name="system-test-build-dir" value="${build-fi.dir}/system"/>
+ <!-- This variable is set by respective injection targets -->
+ <property name="hadoop.instrumented.jars" value=""/>
+ <!-- Properties specifically for system fault-injections and system tests -->
+ <property name="herriot.suffix" value="instrumented"/>
+ <property name="instrumented.final.name"
+ value="${name}-${herriot.suffix}-${version}"/>
+ <property name="hadoop-mapred-instrumented.pom"
+ location="${ivy.dir}/hadoop-mapred-${herriot.suffix}.xml" />
+ <property name="hadoop-mapred-instrumented-test.pom"
+ location="${ivy.dir}/hadoop-mapred-${herriot.suffix}-test.xml" />
+ <property name="hadoop-mapred-instrumented.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
+ <property name="hadoop-mapred-instrumented-sources.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
+ <property name="hadoop-mapred-instrumented-test.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
+ <property name="hadoop-mapred-instrumented-test-sources.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
<!--All Fault Injection (FI) related targets are located in this section -->
<target name="clean-fi">
@@ -39,21 +68,27 @@
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
- <pathelement
+ <pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
+ <path id="aspect.path">
+ <pathelement location="${hadoop.instrumented.jars}"/>
+ </path>
<iajc
encoding="${build.encoding}"
- srcdir="${java.src.dir};${build.src};${test.src.dir}/aop"
+ srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
excludes="org/apache/hadoop/record/**/*"
- destDir="${build.classes}"
+ destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
- deprecation="${javac.deprecation}">
+ deprecation="${javac.deprecation}"
+ fork="true"
+ maxmem="256m">
+ <aspectpath refid="aspect.path"/>
<classpath refid="test.classpath"/>
</iajc>
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
@@ -69,15 +104,133 @@
<echo message="Weaving of aspects is finished"/>
</target>
+ <!-- Classpath for running system tests -->
+ <path id="test.system.classpath">
+ <pathelement location="${hadoop.conf.dir.deployed}" />
+ <pathelement location="${system-test-build-dir}/test/extraconf" />
+ <pathelement location="${system-test-build-dir}/test/classes" />
+ <pathelement location="${build-fi.dir}/test/mapred/classes"/>
+ <pathelement location="${system-test-build-dir}/classes" />
+ <pathelement location="${test.src.dir}" />
+ <pathelement location="${build-fi.dir}" />
+ <pathelement location="${build-fi.dir}/tools" />
+ <pathelement path="${clover.jar}" />
+ <fileset dir="${system-test-build-dir}">
+ <include name="**/*.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <path refid="classpath" />
+ </path>
+ <!-- compile system tests... -->
+ <path id="test.system.compile.classpath">
+ <pathelement location="${system-test-build-dir}/test/extraconf" />
+ <pathelement location="${system-test-build-dir}/test/classes" />
+ <pathelement location="${system-test-build-dir}/classes" />
+ <pathelement location="${build-fi.dir}/test/mapred/classes" />
+ <pathelement location="${build-fi.dir}/test/mapred/testjar" />
+ <pathelement location="${build-fi.dir}/tools" />
+ <pathelement location="${build-fi.dir}/examples" />
+ <path refid="ivy-system.classpath"/>
+ </path>
+
+ <!-- ================ -->
+ <!-- run system tests -->
+ <!-- ================ -->
+ <target name="test-system" depends="ivy-retrieve-common, ivy-retrieve-system"
+ description="Run system tests">
+ <subant buildpath="build.xml" target="jar-test-system"/>
+ <macro-test-runner test.file="${test.mapred.all.tests.file}"
+ suite.type="system/test"
+ classpath="test.system.classpath"
+ test.dir="${system-test-build-dir}/test"
+ fileset.dir="${test.src.dir}"
+ hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
+ </macro-test-runner>
+ </target>
+
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
- <subant buildpath="${basedir}" target="compile-fault-inject"
- output="${compile-inject.output}">
+ <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
+ src.dir="${test.src.dir}/aop"
+ aspects.jars="${build-fi.dir}/ivy/lib/${ant.project.name}/test/hadoop-common-${project.version}.jar">
+ </weave-injectfault-aspects>
+ </target>
+
+ <!-- =============================================================== -->
+ <!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
+ <!-- cluster for system tests -->
+ <!-- =============================================================== -->
+ <target name="jar-system"
+ depends="inject-system-faults"
+ description="Make hadoop-mapred-instrumented.jar with system injections.">
+ <macro-jar-fault-inject target.name="jar"
+ build.dir="${system-test-build-dir}"
+ jar.final.name="final.name"
+ jar.final.value="${instrumented.final.name}">
+ </macro-jar-fault-inject>
+ <jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
+ update="yes">
+ <fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
+ <fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
+ </jar>
+ </target>
+
+ <target name="jar-test-system" depends="inject-system-faults, compile-test-system"
+ description="Make hadoop-mapred-instrumented-test.jar with system injections.">
+ <subant buildpath="build.xml" target="-do-jar-test">
+ <property name="build.dir" value="${system-test-build-dir}"/>
+ <property name="test.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
+ <property name="test.build.classes"
+ value="${system-test-build-dir}/test/classes"/>
+ </subant>
+ <jar jarfile="${hadoop-mapred-instrumented-test-sources.jar}">
+ <fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
+ </jar>
+ </target>
+
+ <target name="compile-test-system" description="Compiles system tests">
+ <subant buildpath="build.xml" target="-compile-test-system.wrapper">
+ <property name="build.dir" value="${system-test-build-dir}"/>
+ </subant>
+ </target>
+
+ <target name="-compile-test-system.wrapper" depends="inject-system-faults, ivy-retrieve-common, ivy-retrieve-system">
+ <macro-compile-test
+ source.dir="${test.src.dir}/system/test"
+ dest.dir="${system-test-build-dir}/test/classes"
+ classpath="test.system.compile.classpath"/>
+ </target>
+
+ <macrodef name="weave-injectfault-aspects">
+ <attribute name="dest.dir" />
+ <attribute name="src.dir" />
+ <attribute name="aspects.jars"/>
+ <sequential>
+ <subant buildpath="build.xml" target="compile-fault-inject"
+ output="${compile-inject.output}">
+ <property name="build.dir" value="${build-fi.dir}" />
+ <property name="src.dir.path" value="@{src.dir}" />
+ <property name="dest.dir" value="@{dest.dir}" />
+ <property name="hadoop.instrumented.jars" value="@{aspects.jars}"/>
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <target name="inject-system-faults"
+ description="Inject system faults">
+ <property name="build-fi.dir" value="${system-test-build-dir}" />
+ <mkdir dir="${build-fi.dir}"/>
+ <delete file="${compile-inject.output}"/>
+ <subant buildpath="build.xml" target="ivy-retrieve-system">
<property name="build.dir" value="${build-fi.dir}"/>
</subant>
+ <weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
+ src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
+ aspects.jars="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar">
+ </weave-injectfault-aspects>
</target>
<macrodef name="macro-run-tests-fault-inject">
@@ -99,11 +252,12 @@
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
+ <attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
- <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
@@ -129,4 +283,78 @@
</macrodef>
<!--End of Fault Injection (FI) related section-->
+
+ <!-- Start of cluster controller binary target -->
+ <property name="runAs.src"
+ value ="${test.src.dir}/system/c++/runAs"/>
+ <property name="runAs.build.dir"
+ value="${system-test-build-dir}/c++-build"/>
+ <property name="runAs.configure.script"
+ value="${runAs.build.dir}/configure"/>
+ <target name="init-runAs-build">
+ <condition property="runAs.parameters.passed">
+ <not>
+ <equals arg1="${run-as.hadoop.home.dir}"
+ arg2="$${run-as.hadoop.home.dir}"/>
+ </not>
+ </condition>
+ <fail unless="runAs.parameters.passed"
+ message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
+ <mkdir dir="${runAs.build.dir}"/>
+ <copy todir="${runAs.build.dir}" overwrite="true">
+ <fileset dir="${runAs.src}" includes="**/*"/>
+ </copy>
+ <chmod perm="+x" file="${runAs.configure.script}">
+ </chmod>
+ </target>
+
+ <target name="configure-runAs"
+ depends="init-runAs-build">
+ <exec executable="${runAs.configure.script}"
+ dir="${runAs.build.dir}" failonerror="true">
+ <arg value="--with-home=${run-as.hadoop.home.dir}"/>
+ </exec>
+ </target>
+ <target name="run-as" depends="configure-runAs">
+ <exec executable="${make.cmd}" dir="${runAs.build.dir}"
+ searchpath="yes" failonerror="yes">
+ <arg value="all" />
+ </exec>
+ </target>
+ <!-- End of cluster controller binary target -->
+ <!-- Install Herriot artifacts to the local Maven -->
+ <target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
+ <artifact:pom file="${hadoop-mapred-instrumented.pom}"
+ id="hadoop.mapred.${herriot.suffix}"/>
+ <artifact:pom file="${hadoop-mapred-instrumented-test.pom}"
+ id="hadoop.mapred.${herriot.suffix}.test"/>
+ <artifact:install file="${hadoop-mapred-instrumented.jar}">
+ <pom refid="hadoop.mapred.${herriot.suffix}"/>
+ <attach file="${hadoop-mapred-instrumented-sources.jar}" classifier="sources" />
+ </artifact:install>
+ <artifact:install file="${hadoop-mapred-instrumented-test.jar}">
+ <pom refid="hadoop.mapred.${herriot.suffix}.test"/>
+ <attach file="${hadoop-mapred-instrumented-test-sources.jar}" classifier="sources" />
+ </artifact:install>
+ </target>
+ <target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
+ <property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
+ <artifact:pom file="${hadoop-mapred-instrumented.pom}"
+ id="hadoop.mapred.${herriot.suffix}"/>
+ <artifact:pom file="${hadoop-mapred-instrumented-test.pom}"
+ id="hadoop.mapred.${herriot.suffix}.test"/>
+
+ <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
+ <artifact:deploy file="${hadoop-mapred-instrumented.jar}">
+ <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+ <pom refid="hadoop.mapred.${herriot.suffix}"/>
+ <attach file="${hadoop-mapred-instrumented-sources.jar}" classifier="sources" />
+ </artifact:deploy>
+ <artifact:deploy file="${hadoop-mapred-instrumented-test.jar}">
+ <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+ <pom refid="hadoop.mapred.${herriot.suffix}.test"/>
+ <attach file="${hadoop-mapred-instrumented-test-sources.jar}" classifier="sources" />
+ </artifact:deploy>
+ </target>
+ <!-- End of Maven -->
</project>
Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java?rev=956666&r1=956665&r2=956666&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/UtilsForTests.java Mon Jun 21 19:02:49 2010
@@ -36,6 +36,7 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
@@ -744,4 +745,38 @@ public class UtilsForTests {
JobTracker jt = new JobTracker();
return jt;
}
+
+ /**
+ * Creates a file at the given dfs path and writes the supplied input to it.
+ * @param dfs FileSystem the file system in which the file is created
+ * @param URIPATH Path the dfs path at which the file is created
+ * @param permission FsPermission the permission to set on the created file
+ * @param input String the content written to the file
+ * @return the closed DataOutputStream of the created file
+ */
+ public static DataOutputStream
+ createTmpFileDFS(FileSystem dfs, Path URIPATH,
+ FsPermission permission, String input) throws Exception {
+ //Creating the path with the file
+ DataOutputStream file =
+ FileSystem.create(dfs, URIPATH, permission);
+ file.writeBytes(input);
+ file.close();
+ return file;
+ }
+
+ /**
+ * This formats the long tasktracker name to just the FQDN
+ * @param taskTrackerLong String The long format of the tasktracker string
+ * @return String The FQDN of the tasktracker
+ * @throws Exception
+ */
+ public static String getFQDNofTT (String taskTrackerLong) throws Exception {
+ //Getting the exact FQDN of the tasktracker from the tasktracker string.
+ String[] firstSplit = taskTrackerLong.split("_");
+ String tmpOutput = firstSplit[1];
+ String[] secondSplit = tmpOutput.split(":");
+ String tmpTaskTracker = secondSplit[0];
+ return tmpTaskTracker;
+ }
+
}
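
getFQDNofTT above assumes tracker names in the JobTracker's report format, roughly tracker_<host>:<port>; splitting on '_' and then on ':' leaves the host part. A small usage sketch, with a made-up tracker string for illustration:

    // Illustrative input in the assumed "tracker_<host>:<port>" format.
    String trackerName = "tracker_host1.example.com:localhost/127.0.0.1:57462";
    // split("_")[1] is "host1.example.com:localhost/127.0.0.1:57462";
    // a further split(":")[0] yields "host1.example.com".
    String fqdn = UtilsForTests.getFQDNofTT(trackerName);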
Added: hadoop/mapreduce/trunk/src/test/mapred/testjar/JobKillCommitter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/testjar/JobKillCommitter.java?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/testjar/JobKillCommitter.java (added)
+++ hadoop/mapreduce/trunk/src/test/mapred/testjar/JobKillCommitter.java Mon Jun 21 19:02:49 2010
@@ -0,0 +1,119 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package testjar;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.FileOutputCommitter;
+import org.apache.hadoop.mapred.JobContext;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+
+public class JobKillCommitter {
+ /**
+ * The class provides an overridden implementation of the output committer's
+ * setupJob method, which causes the job to fail during setup.
+ */
+ public static class CommitterWithFailSetup extends FileOutputCommitter {
+ @Override
+ public void setupJob(JobContext context) throws IOException {
+ throw new IOException();
+ }
+ }
+
+ /**
+ * The class provides a dummy implementation of the output committer
+ * which does nothing.
+ */
+ public static class CommitterWithNoError extends FileOutputCommitter {
+ @Override
+ public void setupJob(JobContext context) throws IOException {
+ }
+
+ @Override
+ public void commitJob(JobContext context) throws IOException {
+ }
+ }
+
+ /**
+ * The class provides an overridden implementation of commitJob, which
+ * causes the job's cleanup to fail.
+ */
+ public static class CommitterWithFailCleanup extends FileOutputCommitter {
+ @Override
+ public void commitJob(JobContext context) throws IOException {
+ throw new IOException();
+ }
+ }
+
+ /**
+ * The class provides a dummy map implementation which
+ * does nothing.
+ */
+ public static class MapperPass extends Mapper<LongWritable, Text, Text, Text> {
+ public void map(LongWritable key, Text value, Context context)
+ throws IOException, InterruptedException {
+ }
+ }
+ /**
+ * The class provides a map implementation that sleeps for a while.
+ */
+ public static class MapperPassSleep extends
+ Mapper<LongWritable, Text, Text, Text> {
+ public void map(LongWritable key, Text value, Context context)
+ throws IOException, InterruptedException {
+ Thread.sleep(10000);
+ }
+ }
+
+ /**
+ * The class provides a way for the mapper function to fail by
+ * intentionally throwing an IOException
+ */
+ public static class MapperFail extends Mapper<LongWritable, Text, Text, Text> {
+ public void map(LongWritable key, Text value, Context context)
+ throws IOException, InterruptedException {
+ throw new IOException();
+ }
+ }
+
+ /**
+ * The class provides a way for the reduce function to fail by
+ * intentionally throwing an IOException
+ */
+ public static class ReducerFail extends Reducer<Text, Text, Text, Text> {
+ public void reduce(Text key, Iterable<Text> values, Context context)
+ throws IOException, InterruptedException {
+ throw new IOException();
+ }
+ }
+
+ /**
+ * The class provides an empty reduce implementation that
+ * does nothing.
+ */
+ public static class ReducerPass extends Reducer<Text, Text, Text, Text> {
+ public void reduce(Text key, Iterable<Text> values, Context context)
+ throws IOException, InterruptedException {
+ }
+ }
+}
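
These helpers are meant to be plugged into a job so that a test can make it fail at a chosen phase. A rough sketch using the old-API JobConf, since the committers extend org.apache.hadoop.mapred.FileOutputCommitter (the job name and the rest of the wiring are illustrative):

    // Illustrative only: configure a job to fail during committer setup.
    JobConf conf = new JobConf();
    conf.setJobName("job-kill-committer-check");
    conf.setJarByClass(JobKillCommitter.class);
    // CommitterWithFailSetup.setupJob() throws an IOException, so the job
    // is expected to fail before any task makes progress.
    conf.setOutputCommitter(JobKillCommitter.CommitterWithFailSetup.class);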
Added: hadoop/mapreduce/trunk/src/test/mapred/testjar/UserNamePermission.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/testjar/UserNamePermission.java?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/testjar/UserNamePermission.java (added)
+++ hadoop/mapreduce/trunk/src/test/mapred/testjar/UserNamePermission.java Mon Jun 21 19:02:49 2010
@@ -0,0 +1,99 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package testjar;
+
+import java.io.IOException;
+import java.util.Iterator;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.NullWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.OutputCollector;
+import org.apache.hadoop.mapred.Reporter;
+
+import org.apache.hadoop.mapreduce.Job;
+import org.apache.hadoop.mapreduce.Mapper;
+import org.apache.hadoop.mapreduce.Reducer;
+import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
+import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
+import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
+
+public class UserNamePermission
+{
+
+ private static final Log LOG = LogFactory.getLog(UserNamePermission.class);
+ //This mapper reads the user name and passes it on to the reducer
+ public static class UserNameMapper extends Mapper<LongWritable,Text,Text,Text>
+ {
+ Text key1 = new Text("UserName");
+ public void map(LongWritable key, Text value, Context context)
+ throws IOException,InterruptedException {
+ Text val = new Text(System.getProperty("user.name").toString());
+ context.write(key1, val);
+ }
+ }
+
+ //The reducer is responsible for writing the user name to the file
+ //which will be validated by the testcase
+ public static class UserNameReducer extends Reducer<Text,Text,Text,Text>
+ {
+ public void reduce(Text key, Iterable<Text> values,
+ Context context) throws IOException,InterruptedException {
+
+ LOG.info("The key "+key);
+ Iterator<Text> it = values.iterator();
+ if(it.hasNext())
+ {
+ Text val = it.next();
+ LOG.info("The value "+val);
+
+ context.write(key,new Text(System.getProperty("user.name")));
+ }
+
+ }
+ }
+
+ public static void main(String [] args) throws Exception
+ {
+ Path outDir = new Path("output");
+ Configuration conf = new Configuration();
+ Job job = new Job(conf, "user name check");
+
+
+ job.setJarByClass(UserNamePermission.class);
+ job.setMapperClass(UserNamePermission.UserNameMapper.class);
+ job.setCombinerClass(UserNamePermission.UserNameReducer.class);
+ job.setMapOutputKeyClass(Text.class);
+ job.setMapOutputValueClass(Text.class);
+ job.setReducerClass(UserNamePermission.UserNameReducer.class);
+ job.setNumReduceTasks(1);
+
+ job.setInputFormatClass(TextInputFormat.class);
+ TextInputFormat.addInputPath(job, new Path("input"));
+ FileOutputFormat.setOutputPath(job, outDir);
+
+ System.exit(job.waitForCompletion(true) ? 0 : 1);
+ }
+
+}
+
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JTProtocolAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.TTInfo;
+import org.apache.hadoop.mapreduce.test.system.TaskInfo;
+
+/**
+ * Aspect that injects the basic protocol functionality to be implemented
+ * by all services that implement {@link ClientProtocol}.
+ *
+ * The aspect also injects a default implementation of the {@link JTProtocol}.
+ */
+
+public aspect JTProtocolAspect {
+
+ // Make ClientProtocol extend JTProtocol
+ declare parents : ClientProtocol extends JTProtocol;
+
+ /*
+ * Start of default implementation of the methods in JTProtocol
+ */
+
+ public Configuration JTProtocol.getDaemonConf() throws IOException {
+ return null;
+ }
+
+ public JobInfo JTProtocol.getJobInfo(JobID jobID) throws IOException {
+ return null;
+ }
+
+ public TaskInfo JTProtocol.getTaskInfo(TaskID taskID) throws IOException {
+ return null;
+ }
+
+ public TTInfo JTProtocol.getTTInfo(String trackerName) throws IOException {
+ return null;
+ }
+
+ public JobInfo[] JTProtocol.getAllJobInfo() throws IOException {
+ return null;
+ }
+
+ public TaskInfo[] JTProtocol.getTaskInfo(JobID jobID) throws IOException {
+ return null;
+ }
+
+ public TTInfo[] JTProtocol.getAllTTInfo() throws IOException {
+ return null;
+ }
+
+ public boolean JTProtocol.isJobRetired(JobID jobID) throws IOException {
+ return false;
+ }
+
+ public String JTProtocol.getJobHistoryLocationForRetiredJob(JobID jobID) throws IOException {
+ return "";
+ }
+}
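
The declare parents statement means that, once this aspect is woven into an instrumented build, every ClientProtocol is also a JTProtocol, so tests can reach the Herriot query methods through the ordinary client proxy. A sketch under that assumption (the helper method is hypothetical):

    // Valid only against a woven (instrumented) build, where
    // ClientProtocol extends JTProtocol via the aspect above.
    static JobInfo[] listAllJobs(ClientProtocol proxy) throws IOException {
      JTProtocol jt = (JTProtocol) proxy;
      return jt.getAllJobInfo();
    }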
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobClientAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,35 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
+
+public privileged aspect JobClientAspect {
+
+ public ClientProtocol JobClient.getProtocol() {
+ return cluster.getClientProtocol();
+ }
+
+ public void JobClient.killJob(JobID id) throws IOException,InterruptedException {
+ cluster.getClientProtocol().killJob(
+ org.apache.hadoop.mapred.JobID.downgrade(id));
+ }
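+
+ /*
+ * Usage sketch (illustrative; the surrounding test harness is assumed):
+ * the methods injected above expose the raw ClientProtocol and a kill by
+ * the new-API JobID, which is downgraded to the old-API id before the RPC.
+ *
+ * JobClient client = new JobClient(new JobConf(conf));
+ * client.killJob(job.getJobID());
+ */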
+}
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobInProgressAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,73 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistory;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+
+/**
+ * Aspect that adds a utility method to JobInProgress to ease the
+ * construction of the JobInfo object.
+ */
+privileged aspect JobInProgressAspect {
+
+ /**
+ * Returns a read-only view of the JobInProgress object which is used by the
+ * client.
+ *
+ * @return JobInfo of the current JobInProgress object
+ */
+ public JobInfo JobInProgress.getJobInfo() {
+ String historyLoc = getHistoryPath();
+ boolean isHistoryFileCopied = this.status.getHistoryFile() != null;
+ if (tasksInited.get()) {
+ return new JobInfoImpl(
+ this.getJobID(), this.isSetupLaunched(), this.isSetupFinished(), this
+ .isCleanupLaunched(), this.runningMaps(), this.runningReduces(),
+ this.pendingMaps(), this.pendingReduces(), this.finishedMaps(), this
+ .finishedReduces(), this.getStatus(), historyLoc, this
+ .getBlackListedTrackers(), false, this.numMapTasks,
+ this.numReduceTasks, isHistoryFileCopied);
+ } else {
+ return new JobInfoImpl(
+ this.getJobID(), false, false, false, 0, 0, this.pendingMaps(), this
+ .pendingReduces(), this.finishedMaps(), this.finishedReduces(),
+ this.getStatus(), historyLoc, this.getBlackListedTrackers(), this
+ .isComplete(), this.numMapTasks, this.numReduceTasks, false);
+ }
+ }
+
+ private String JobInProgress.getHistoryPath() {
+ String historyLoc = "";
+ if (this.isComplete()) {
+ historyLoc = this.getStatus().getHistoryFile();
+ } else {
+ Path jobHistoryDirectory = this.jobHistory.getJobHistoryLocation();
+ Path historypath =
+ JobHistory.getJobHistoryFile(
+ jobHistoryDirectory, this.getJobID(), this.profile.getUser());
+ historyLoc = historypath.toString();
+ }
+ return historyLoc;
+ }
+
+}
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/JobTrackerAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,221 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import java.util.Set;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.TaskID;
+import org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker;
+import org.apache.hadoop.mapreduce.test.system.JTProtocol;
+import org.apache.hadoop.mapreduce.test.system.JobInfo;
+import org.apache.hadoop.mapreduce.test.system.TTInfo;
+import org.apache.hadoop.mapreduce.test.system.TaskInfo;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+/**
+ * Aspect class which injects the system-test instrumentation code into the
+ * {@link JobTracker} class.
+ *
+ */
+public privileged aspect JobTrackerAspect {
+
+ public Configuration JobTracker.getDaemonConf() throws IOException {
+ return conf;
+ }
+
+ /**
+ * Method to get the read-only view of the job and its associated
+ * information.
+ *
+ * @param jobID
+ * id of the job for which information is required.
+ * @return JobInfo of the job requested, or null if no such job is known.
+ * @throws IOException
+ */
+ public JobInfo JobTracker.getJobInfo(JobID jobID) throws IOException {
+ JobInProgress jip = jobs.get(org.apache.hadoop.mapred.JobID
+ .downgrade(jobID));
+ if (jip == null) {
+ LOG.warn("No job present for : " + jobID);
+ return null;
+ }
+ JobInfo info;
+ synchronized (jip) {
+ info = jip.getJobInfo();
+ }
+ return info;
+ }
+
+ /**
+ * Method to get the read-only view of the task and its associated
+ * information.
+ *
+ * @param taskID
+ * id of the task for which information is required.
+ * @return TaskInfo of the task requested, or null if no such task exists.
+ * @throws IOException
+ */
+ public TaskInfo JobTracker.getTaskInfo(TaskID taskID) throws IOException {
+ TaskInProgress tip = getTip(org.apache.hadoop.mapred.TaskID
+ .downgrade(taskID));
+
+ if (tip == null) {
+ LOG.warn("No task present for : " + taskID);
+ return null;
+ }
+ return getTaskInfo(tip);
+ }
+
+ public TTInfo JobTracker.getTTInfo(String trackerName) throws IOException {
+ org.apache.hadoop.mapreduce.server.jobtracker.TaskTracker tt = taskTrackers
+ .get(trackerName);
+ if (tt == null) {
+ LOG.warn("No task tracker with name : " + trackerName + " found");
+ return null;
+ }
+ TaskTrackerStatus status = tt.getStatus();
+ TTInfo info = new TTInfoImpl(status.trackerName, status);
+ return info;
+ }
+
+ // XXX The two methods below don't reuse getJobInfo and getTaskInfo because
+ // the retire-job thread could remove the job from the JobTracker's memory
+ // while the RPC call is being processed.
+ public JobInfo[] JobTracker.getAllJobInfo() throws IOException {
+ List<JobInfo> infoList = new ArrayList<JobInfo>();
+ synchronized (jobs) {
+ for (JobInProgress jip : jobs.values()) {
+ JobInfo info = jip.getJobInfo();
+ infoList.add(info);
+ }
+ }
+ return (JobInfo[]) infoList.toArray(new JobInfo[infoList.size()]);
+ }
+
+ public TaskInfo[] JobTracker.getTaskInfo(JobID jobID) throws IOException {
+ JobInProgress jip = jobs.get(org.apache.hadoop.mapred.JobID
+ .downgrade(jobID));
+ if (jip == null) {
+ LOG.warn("Unable to find job : " + jobID);
+ return null;
+ }
+ List<TaskInfo> infoList = new ArrayList<TaskInfo>();
+ synchronized (jip) {
+ for (TaskInProgress tip : jip.setup) {
+ infoList.add(getTaskInfo(tip));
+ }
+ for (TaskInProgress tip : jip.maps) {
+ infoList.add(getTaskInfo(tip));
+ }
+ for (TaskInProgress tip : jip.reduces) {
+ infoList.add(getTaskInfo(tip));
+ }
+ for (TaskInProgress tip : jip.cleanup) {
+ infoList.add(getTaskInfo(tip));
+ }
+ }
+ return (TaskInfo[]) infoList.toArray(new TaskInfo[infoList.size()]);
+ }
+
+ public TTInfo[] JobTracker.getAllTTInfo() throws IOException {
+ List<TTInfo> infoList = new ArrayList<TTInfo>();
+ synchronized (taskTrackers) {
+ for (TaskTracker tt : taskTrackers.values()) {
+ TaskTrackerStatus status = tt.getStatus();
+ TTInfo info = new TTInfoImpl(status.trackerName, status);
+ infoList.add(info);
+ }
+ }
+ return (TTInfo[]) infoList.toArray(new TTInfo[infoList.size()]);
+ }
+
+ public boolean JobTracker.isJobRetired(JobID id) throws IOException {
+ return retireJobs.get(
+ org.apache.hadoop.mapred.JobID.downgrade(id)) != null;
+ }
+
+ public String JobTracker.getJobHistoryLocationForRetiredJob(
+ JobID id) throws IOException {
+ String historyFile = this.getJobStatus(id).getHistoryFile();
+ if (historyFile == null) {
+ throw new IOException("The retired job information for the job : "
+ + id + " is not found");
+ } else {
+ return historyFile;
+ }
+ }
+
+ pointcut getVersionAspect(String protocol, long clientVersion) :
+ execution(public long JobTracker.getProtocolVersion(String,
+ long) throws IOException) && args(protocol, clientVersion);
+
+ long around(String protocol, long clientVersion) :
+ getVersionAspect(protocol, clientVersion) {
+ if (protocol.equals(DaemonProtocol.class.getName())) {
+ return DaemonProtocol.versionID;
+ } else if (protocol.equals(JTProtocol.class.getName())) {
+ return JTProtocol.versionID;
+ } else {
+ return proceed(protocol, clientVersion);
+ }
+ }
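+
+ /*
+ * Illustrative note: the around advice above lets the JobTracker's RPC
+ * server answer version handshakes for the injected test protocols, so a
+ * client-side proxy can be created against the JobTracker address. A
+ * sketch, assuming jtAddress points at the JobTracker RPC port:
+ *
+ * JTProtocol proxy = (JTProtocol) RPC.getProxy(
+ * JTProtocol.class, JTProtocol.versionID, jtAddress, conf);
+ */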
+
+ /**
+ * Pointcut which monitors the construction of the JobTracker so that the
+ * advice below can record the daemon user and mark the tracker as ready.
+ */
+ pointcut jtConstructorPointCut() :
+ call(JobTracker.new(..));
+
+ after() returning (JobTracker tracker): jtConstructorPointCut() {
+ try {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ tracker.setUser(ugi.getShortUserName());
+ } catch (IOException e) {
+ tracker.LOG.warn("Unable to get the user information for the "
+ + "Jobtracker");
+ }
+ tracker.setReady(true);
+ }
+
+ private TaskInfo JobTracker.getTaskInfo(TaskInProgress tip) {
+ TaskStatus[] status = tip.getTaskStatuses();
+ if (status == null) {
+ if (tip.isMapTask()) {
+ status = new MapTaskStatus[]{};
+ }
+ else {
+ status = new ReduceTaskStatus[]{};
+ }
+ }
+ String[] trackers =
+ (String[]) (tip.getActiveTasks().values()).toArray(new String[tip
+ .getActiveTasks().values().size()]);
+ TaskInfo info =
+ new TaskInfoImpl(tip.getTIPId(), tip.getProgress(), tip
+ .getActiveTasks().size(), tip.numKilledTasks(), tip
+ .numTaskFailures(), status, (tip.isJobSetupTask() || tip
+ .isJobCleanupTask()), trackers);
+ return info;
+ }
+}
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/MapReducePolicyProviderAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/MapReducePolicyProviderAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/MapReducePolicyProviderAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/MapReducePolicyProviderAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,58 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapred;
+
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.mapreduce.test.system.TTProtocol;
+import org.apache.hadoop.security.authorize.Service;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+/**
+ * This aspect adds two MR-specific Herriot protocols to the list of
+ * 'authorized' Herriot protocols. Protocol descriptors, e.g.
+ * 'security.tt.protocol.acl', have to be added to <code>hadoop-policy.xml</code>
+ * if that file is present.
+ */
+public privileged aspect MapReducePolicyProviderAspect {
+ private static final Log LOG = LogFactory
+ .getLog(MapReducePolicyProviderAspect.class);
+ ArrayList<Service> herriotMRServices = null;
+
+ pointcut updateMRServices() :
+ execution (public Service[] MapReducePolicyProvider.getServices());
+
+ Service[] around() : updateMRServices () {
+ herriotMRServices = new ArrayList<Service>();
+ for (Service s : MapReducePolicyProvider.mapReduceServices) {
+ LOG.debug("Copying configured protocol to "
+ + s.getProtocol().getCanonicalName());
+ herriotMRServices.add(s);
+ }
+ herriotMRServices.add(new Service("security.daemon.protocol.acl",
+ DaemonProtocol.class));
+ herriotMRServices.add(new Service("security.tt.protocol.acl",
+ TTProtocol.class));
+ final Service[] retArray = herriotMRServices
+ .toArray(new Service[herriotMRServices.size()]);
+ LOG.debug("Number of configured protocols to return: " + retArray.length);
+ return retArray;
+ }
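+
+ /*
+ * Reference sketch: when service-level authorization is enabled, the two
+ * ACLs registered above can be opened up in hadoop-policy.xml like any
+ * other protocol ACL (the wildcard value is only an example):
+ *
+ * <property>
+ * <name>security.daemon.protocol.acl</name>
+ * <value>*</value>
+ * </property>
+ * <property>
+ * <name>security.tt.protocol.acl</name>
+ * <value>*</value>
+ * </property>
+ */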
+}
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,114 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.util.concurrent.atomic.AtomicBoolean;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.mapred.Task.TaskReporter;
+import org.apache.hadoop.mapreduce.test.system.FinishTaskControlAction;
+import org.apache.hadoop.test.system.ControlAction;
+import org.apache.hadoop.test.system.DaemonProtocol;
+import org.apache.hadoop.mapreduce.test.system.TTProtocol;
+
+public privileged aspect TaskAspect {
+
+ private static final Log LOG = LogFactory.getLog(TaskAspect.class);
+
+ private Object waitObject = new Object();
+ private AtomicBoolean isWaitingForSignal = new AtomicBoolean(false);
+
+ private DaemonProtocol daemonProxy;
+
+ pointcut taskDoneIntercept(Task task) : execution(
+ public void Task.done(..)) && target(task);
+
+ void around(Task task) : taskDoneIntercept(task) {
+ if(task.isJobCleanupTask() || task.isJobSetupTask() || task.isTaskCleanupTask()) {
+ proceed(task);
+ return;
+ }
+ Configuration conf = task.getConf();
+ boolean controlEnabled = FinishTaskControlAction.isControlActionEnabled(conf);
+ if(controlEnabled) {
+ LOG.info("Task control enabled, waiting till client sends signal to " +
+ "complete");
+ try {
+ synchronized (waitObject) {
+ isWaitingForSignal.set(true);
+ waitObject.wait();
+ }
+ } catch (InterruptedException e) {
+ // Ignore the interrupt and fall through to complete the task.
+ }
+ }
+ proceed(task);
+ return;
+ }
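+
+ /*
+ * Test-side sketch (illustrative; the helper and proxy names are
+ * assumptions): a test enables the control action in the job conf, and the
+ * advice above then parks each map and reduce task in Task.done() until a
+ * FinishTaskControlAction for that task reaches its TaskTracker.
+ *
+ * FinishTaskControlAction.configureControlActionForJob(conf);
+ * // ... submit the job, then release a chosen task:
+ * ttClient.getProxy().sendAction(new FinishTaskControlAction(taskID));
+ */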
+
+ pointcut taskStatusUpdate(TaskReporter reporter, TaskAttemptID id) :
+ call(public boolean TaskUmbilicalProtocol.ping(TaskAttemptID))
+ && this(reporter) && args(id);
+
+ after(TaskReporter reporter, TaskAttemptID id) throws IOException :
+ taskStatusUpdate(reporter, id) {
+ synchronized (waitObject) {
+ if(isWaitingForSignal.get()) {
+ ControlAction[] actions = daemonProxy.getActions(
+ id.getTaskID());
+ if(actions.length == 0) {
+ return;
+ }
+ boolean shouldProceed = false;
+ for(ControlAction action : actions) {
+ if (action instanceof FinishTaskControlAction) {
+ LOG.info("Recv : Control task action to finish task id: "
+ + action.getTarget());
+ shouldProceed = true;
+ daemonProxy.removeAction(action);
+ LOG.info("Removed the control action from TaskTracker");
+ break;
+ }
+ }
+ if(shouldProceed) {
+ LOG.info("Notifying the task to completion");
+ waitObject.notify();
+ }
+ }
+ }
+ }
+
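+ /*
+ * Capture the umbilical RPC proxy creation inside the Child JVM and build
+ * a parallel TTProtocol proxy to the same TaskTracker address, so the
+ * advice above can poll the TaskTracker for queued control actions.
+ */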
+
+ pointcut rpcInterceptor(Class k, long version, InetSocketAddress addr,
+ Configuration conf) : call(
+ public static * RPC.getProxy(Class, long, InetSocketAddress,
+ Configuration)) && args(k, version, addr, conf) &&
+ within(org.apache.hadoop.mapred.Child);
+
+ after(Class k, long version, InetSocketAddress addr, Configuration conf)
+ throws IOException : rpcInterceptor(k, version, addr, conf) {
+ daemonProxy =
+ (TTProtocol) RPC.getProxy(
+ TTProtocol.class, TTProtocol.versionID, addr, conf);
+ }
+
+}
Added: hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj?rev=956666&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj (added)
+++ hadoop/mapreduce/trunk/src/test/system/aop/org/apache/hadoop/mapred/TaskTrackerAspect.aj Mon Jun 21 19:02:49 2010
@@ -0,0 +1,155 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import java.io.IOException;
+import java.util.List;
+import java.util.ArrayList;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.test.system.TTProtocol;
+import org.apache.hadoop.mapreduce.test.system.TTTaskInfo;
+import org.apache.hadoop.mapred.TTTaskInfoImpl.MapTTTaskInfo;
+import org.apache.hadoop.mapred.TTTaskInfoImpl.ReduceTTTaskInfo;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.system.DaemonProtocol;
+import org.apache.hadoop.util.Shell;
+import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.mapred.TaskTracker.TaskInProgress;
+import org.apache.hadoop.mapreduce.TaskAttemptID;
+
+public privileged aspect TaskTrackerAspect {
+
+ declare parents : TaskTracker implements TTProtocol;
+
+ // Add a last-sent-status field to the TaskTracker class.
+ TaskTrackerStatus TaskTracker.lastSentStatus = null;
+ public static String TaskTracker.TASKJARDIR = TaskTracker.JARSDIR;
+
+ public synchronized TaskTrackerStatus TaskTracker.getStatus()
+ throws IOException {
+ return lastSentStatus;
+ }
+
+ public Configuration TaskTracker.getDaemonConf() throws IOException {
+ return fConf;
+ }
+
+ public TTTaskInfo[] TaskTracker.getTasks() throws IOException {
+ List<TTTaskInfo> infoList = new ArrayList<TTTaskInfo>();
+ synchronized (tasks) {
+ for (TaskInProgress tip : tasks.values()) {
+ TTTaskInfo info = getTTTaskInfo(tip);
+ infoList.add(info);
+ }
+ }
+ return (TTTaskInfo[]) infoList.toArray(new TTTaskInfo[infoList.size()]);
+ }
+
+ public TTTaskInfo TaskTracker.getTask(org.apache.hadoop.mapreduce.TaskID id)
+ throws IOException {
+ TaskID old = org.apache.hadoop.mapred.TaskID.downgrade(id);
+ synchronized (tasks) {
+ for(TaskAttemptID ta : tasks.keySet()) {
+ if(old.equals(ta.getTaskID())) {
+ return getTTTaskInfo(tasks.get(ta));
+ }
+ }
+ }
+ return null;
+ }
+
+ private TTTaskInfo TaskTracker.getTTTaskInfo(TaskInProgress tip) {
+ TTTaskInfo info;
+ if (tip.task.isMapTask()) {
+ info = new MapTTTaskInfo(tip.slotTaken, tip.wasKilled,
+ (MapTaskStatus) tip.getStatus(), tip.getJobConf(), tip.getTask()
+ .getUser(), tip.getTask().isTaskCleanupTask(), getPid(tip.getTask().getTaskID()));
+ } else {
+ info = new ReduceTTTaskInfo(tip.slotTaken, tip.wasKilled,
+ (ReduceTaskStatus) tip.getStatus(), tip.getJobConf(), tip.getTask()
+ .getUser(), tip.getTask().isTaskCleanupTask(),getPid(tip.getTask().getTaskID()));
+ }
+ return info;
+ }
+
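+ /*
+ * The TaskTracker nulls its status field after a successful heartbeat;
+ * intercept the assignment and preserve the previously transmitted status
+ * in lastSentStatus before it is cleared.
+ */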
+ before(TaskTrackerStatus newStatus, TaskTracker tracker) :
+ set(TaskTrackerStatus TaskTracker.status)
+ && args(newStatus) && this(tracker) {
+ if (newStatus == null) {
+ tracker.lastSentStatus = tracker.status;
+ }
+ }
+
+ pointcut ttConstructorPointCut(JobConf conf) :
+ call(TaskTracker.new(JobConf))
+ && args(conf);
+
+ after(JobConf conf) returning (TaskTracker tracker):
+ ttConstructorPointCut(conf) {
+ try {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ tracker.setUser(ugi.getShortUserName());
+ } catch (IOException e) {
+ tracker.LOG.warn("Unable to get the user information for the " +
+ "Jobtracker");
+ }
+ tracker.setReady(true);
+ }
+
+ pointcut getVersionAspect(String protocol, long clientVersion) :
+ execution(public long TaskTracker.getProtocolVersion(String ,
+ long) throws IOException) && args(protocol, clientVersion);
+
+ long around(String protocol, long clientVersion) :
+ getVersionAspect(protocol, clientVersion) {
+ if(protocol.equals(DaemonProtocol.class.getName())) {
+ return DaemonProtocol.versionID;
+ } else if(protocol.equals(TTProtocol.class.getName())) {
+ return TTProtocol.versionID;
+ } else {
+ return proceed(protocol, clientVersion);
+ }
+ }
+
+ public boolean TaskTracker.isProcessTreeAlive(String pid) throws IOException {
+ // The command to be executed is as follows:
+ // ps -o pid,ppid,sid,command -e | grep -v ps | grep -v grep | grep
+ // "$pid"
+ String checkerCommand =
+ getDaemonConf().get(
+ "test.system.processgroup_checker_command",
+ "ps -o pid,ppid,sid,command -e "
+ + "| grep -v ps | grep -v grep | grep \"$");
+ String[] command =
+ new String[] { "bash", "-c", checkerCommand + pid + "\"" };
+ ShellCommandExecutor shexec = new ShellCommandExecutor(command);
+ try {
+ shexec.execute();
+ } catch (Shell.ExitCodeException e) {
+ TaskTracker.LOG
+ .info("The process tree grep threw a exitcode exception pointing "
+ + "to process tree not being alive.");
+ return false;
+ }
+ TaskTracker.LOG.info("The task grep command is : "
+ + shexec.toString() + " the output from command is : "
+ + shexec.getOutput());
+ return true;
+ }
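+
+ /*
+ * Configuration sketch (illustrative): the ps/grep pipeline above can be
+ * replaced with a site-specific checker by overriding this property; the
+ * pid and the closing quote are appended by the method itself:
+ *
+ * <property>
+ * <name>test.system.processgroup_checker_command</name>
+ * <value>ps -o pid,ppid,sid,command -e | grep -v ps | grep -v grep | grep "$</value>
+ * </property>
+ */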
+}