Posted to hdfs-commits@hadoop.apache.org by co...@apache.org on 2010/06/02 02:52:29 UTC
svn commit: r950323 - in /hadoop/hdfs/trunk: ./ ivy/ src/test/aop/build/
src/test/system/ src/test/system/aop/ src/test/system/aop/org/
src/test/system/aop/org/apache/ src/test/system/aop/org/apache/hadoop/
src/test/system/aop/org/apache/hadoop/hdfs/ s...
Author: cos
Date: Wed Jun 2 00:52:28 2010
New Revision: 950323
URL: http://svn.apache.org/viewvc?rev=950323&view=rev
Log:
HDFS-1134. Large-scale Automated Framework. Contributed by Konstantin Boudnik.
Added:
hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml
hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml
hadoop/hdfs/trunk/src/test/system/
hadoop/hdfs/trunk/src/test/system/aop/
hadoop/hdfs/trunk/src/test/system/aop/org/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/
hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj
hadoop/hdfs/trunk/src/test/system/conf/
hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml
hadoop/hdfs/trunk/src/test/system/java/
hadoop/hdfs/trunk/src/test/system/java/org/
hadoop/hdfs/trunk/src/test/system/java/org/apache/
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java
hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java
hadoop/hdfs/trunk/src/test/system/test/
hadoop/hdfs/trunk/src/test/system/test/org/
hadoop/hdfs/trunk/src/test/system/test/org/apache/
hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/
hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/
hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java
Modified:
hadoop/hdfs/trunk/CHANGES.txt
hadoop/hdfs/trunk/build.xml
hadoop/hdfs/trunk/ivy.xml
hadoop/hdfs/trunk/ivy/libraries.properties
hadoop/hdfs/trunk/src/test/aop/build/aop.xml
Modified: hadoop/hdfs/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/CHANGES.txt?rev=950323&r1=950322&r2=950323&view=diff
==============================================================================
--- hadoop/hdfs/trunk/CHANGES.txt (original)
+++ hadoop/hdfs/trunk/CHANGES.txt Wed Jun 2 00:52:28 2010
@@ -91,6 +91,8 @@ Release 0.21.0 - Unreleased
NEW FEATURES
+ HDFS-1134. Large-scale Automated Framework. (cos)
+
HDFS-436. Introduce AspectJ framework for HDFS code and tests.
(Konstantin Boudnik via szetszwo)
Modified: hadoop/hdfs/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/build.xml?rev=950323&r1=950322&r2=950323&view=diff
==============================================================================
--- hadoop/hdfs/trunk/build.xml (original)
+++ hadoop/hdfs/trunk/build.xml Wed Jun 2 00:52:28 2010
@@ -28,6 +28,8 @@
<property name="Name" value="Hadoop-Hdfs"/>
<property name="name" value="hadoop-hdfs"/>
+ <!-- Need to change aop.xml project.version prop. synchronously
+ -->
<property name="version" value="0.22.0-SNAPSHOT"/>
<property name="final.name" value="${name}-${version}"/>
<property name="test.hdfs.final.name" value="${name}-test-${version}"/>
@@ -369,20 +371,11 @@
</target>
<target name="compile-hdfs-test" depends="compile-hdfs-classes, ivy-retrieve-test">
- <mkdir dir="${test.hdfs.build.classes}"/>
- <javac
- encoding="${build.encoding}"
- srcdir="${test.src.dir}/hdfs;${test.src.dir}/unit"
- includes="org/apache/hadoop/**/*.java"
- destdir="${test.hdfs.build.classes}"
- debug="${javac.debug}"
- optimize="${javac.optimize}"
- target="${javac.version}"
- source="${javac.version}"
- deprecation="${javac.deprecation}">
- <compilerarg line="${javac.args} ${javac.args.warnings}" />
- <classpath refid="test.classpath"/>
- </javac>
+ <macro-compile-hdfs-test
+ target.dir="${test.hdfs.build.classes}"
+ source.dir="${test.src.dir}/hdfs;${test.src.dir}/unit"
+ dest.dir="${test.hdfs.build.classes}"
+ classpath="test.classpath"/>
<delete dir="${test.cache.data}"/>
<mkdir dir="${test.cache.data}"/>
@@ -398,6 +391,29 @@
<copy file="${test.src.dir}/hdfs/org/apache/hadoop/hdfs/tools/offlineImageViewer/fsimageV19" todir="${test.cache.data}"/>
</target>
+ <macrodef name="macro-compile-hdfs-test">
+ <attribute name="target.dir"/>
+ <attribute name="source.dir"/>
+ <attribute name="dest.dir"/>
+ <attribute name="classpath"/>
+ <sequential>
+ <mkdir dir="@{target.dir}"/>
+ <javac
+ encoding="${build.encoding}"
+ srcdir="@{source.dir}"
+ includes="org/apache/hadoop/**/*.java"
+ destdir="@{dest.dir}"
+ debug="${javac.debug}"
+ optimize="${javac.optimize}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <compilerarg line="${javac.args} ${javac.args.warnings}"/>
+ <classpath refid="@{classpath}"/>
+ </javac>
+ </sequential>
+ </macrodef>
+
<!-- ================================================================== -->
<!-- Make hadoop-test.jar -->
<!-- ================================================================== -->
@@ -409,6 +425,15 @@
<copy todir="${test.build.classes}">
<fileset dir="${test.hdfs.build.classes}"/>
</copy>
+ <subant buildpath="build.xml" target="-do-jar-test">
+ </subant>
+ <jar jarfile="${hadoop-hdfs-test-sources.jar}">
+ <fileset dir="${test.src.dir}/hdfs" includes="org/apache/hadoop/**/*.java" />
+ <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
+ </jar>
+ </target>
+
+ <target name="-do-jar-test">
<jar jarfile="${build.dir}/${test.hdfs.final.name}.jar"
basedir="${test.build.classes}">
<manifest>
@@ -422,10 +447,6 @@
</manifest>
</jar>
- <jar jarfile="${hadoop-hdfs-test-sources.jar}">
- <fileset dir="${test.src.dir}/hdfs" includes="org/apache/hadoop/**/*.java" />
- <fileset dir="${test.src.dir}/unit" includes="org/apache/hadoop/**/*.java" />
- </jar>
</target>
<!-- ================================================================== -->
@@ -458,6 +479,7 @@
description="Make hadoop-fi.jar">
<macro-jar-fault-inject
target.name="jar"
+ build.dir="${build-fi.dir}"
jar.final.name="final.name"
jar.final.value="${final.name}-fi" />
</target>
@@ -514,15 +536,19 @@
<macrodef name="macro-test-runner">
<attribute name="test.file" />
<attribute name="suite.type" />
+ <attribute name="classpath" />
+ <attribute name="test.dir" />
+ <attribute name="fileset.dir" />
+ <attribute name="hadoop.conf.dir.deployed" default="" />
<sequential>
- <delete dir="${test.build.data}"/>
- <mkdir dir="${test.build.data}"/>
- <delete dir="${test.log.dir}"/>
- <mkdir dir="${test.log.dir}"/>
+ <delete dir="@{test.dir}/data"/>
+ <mkdir dir="@{test.dir}/data"/>
+ <delete dir="@{test.dir}/logs"/>
+ <mkdir dir="@{test.dir}/logs"/>
<copy file="${test.src.dir}/hadoop-policy.xml"
- todir="${test.build.extraconf}" />
+ todir="@{test.dir}/extraconf" />
<copy file="${test.src.dir}/fi-site.xml"
- todir="${test.build.extraconf}" />
+ todir="@{test.dir}/extraconf" />
<junit showoutput="${test.output}"
printsummary="${test.junit.printsummary}"
haltonfailure="${test.junit.haltonfailure}"
@@ -531,42 +557,47 @@
maxmemory="${test.junit.maxmemory}"
dir="${basedir}" timeout="${test.timeout}"
errorProperty="tests.failed" failureProperty="tests.failed">
- <sysproperty key="test.build.data" value="${test.build.data}"/>
+ <sysproperty key="test.build.data" value="@{test.dir}/data"/>
<sysproperty key="test.cache.data" value="${test.cache.data}"/>
<sysproperty key="test.debug.data" value="${test.debug.data}"/>
- <sysproperty key="hadoop.log.dir" value="${test.log.dir}"/>
- <sysproperty key="test.src.dir" value="${test.src.dir}"/>
- <sysproperty key="test.build.extraconf" value="${test.build.extraconf}" />
+ <sysproperty key="hadoop.log.dir" value="@{test.dir}/logs"/>
+ <sysproperty key="test.src.dir" value="@{fileset.dir}"/>
+ <sysproperty key="test.build.extraconf" value="@{test.dir}/extraconf" />
<sysproperty key="hadoop.policy.file" value="hadoop-policy.xml"/>
<sysproperty key="hdfs.rpc.engine" value="${test.hdfs.rpc.engine}"/>
- <classpath refid="test.classpath"/>
+ <classpath refid="@{classpath}"/>
<!-- Pass probability specifications to the spawn JVM -->
<syspropertyset id="FaultProbabilityProperties">
<propertyref regex="fi.*"/>
</syspropertyset>
+ <sysproperty key="test.system.hdrc.deployed.hadoopconfdir"
+ value="@{hadoop.conf.dir.deployed}" />
<formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" if="tests.notestcase">
- <fileset dir="${test.src.dir}/@{suite.type}" excludes="**/${test.exclude}.java">
+ <batchtest todir="@{test.dir}" if="tests.notestcase">
+ <fileset dir="@{fileset.dir}/@{suite.type}"
+ excludes="**/${test.exclude}.java
+ aop/** system/**">
<patternset>
<includesfile name="@{test.file}"/>
</patternset>
</fileset>
</batchtest>
- <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
- <fileset dir="${test.src.dir}/aop"
+ <batchtest todir="@{test.dir}" if="tests.notestcase.fi">
+ <fileset dir="@{fileset.dir}/aop"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java" />
</batchtest>
- <batchtest todir="${test.build.dir}" if="tests.testcase">
- <fileset dir="${test.src.dir}/@{suite.type}" includes="**/${testcase}.java"/>
+ <batchtest todir="@{test.dir}" if="tests.testcase">
+ <fileset dir="@{fileset.dir}/@{suite.type}" includes="**/${testcase}.java"
+ excludes="aop/** system/**"/>
</batchtest>
- <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
- <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+ <batchtest todir="@{test.dir}" if="tests.testcase.fi">
+ <fileset dir="@{fileset.dir}/aop" includes="**/${testcase}.java"/>
</batchtest>
<!--The following batch is for very special occasions only when
non-FI tests need to be executed against an FI environment -->
- <batchtest todir="${test.build.dir}" if="tests.testcaseonly.fi">
- <fileset dir="${test.src.dir}/hdfs" includes="**/${testcase}.java"/>
+ <batchtest todir="@{test.dir}" if="tests.testcaseonly.fi">
+ <fileset dir="@{fileset.dir}/hdfs" includes="**/${testcase}.java"/>
</batchtest>
</junit>
<antcall target="checkfailure"/>
@@ -574,15 +605,30 @@
</macrodef>
<target name="run-test-hdfs" depends="compile-hdfs-test" description="Run full set of hdfs unit tests">
- <macro-test-runner test.file="${test.hdfs.all.tests.file}" suite.type="hdfs"/>
+ <macro-test-runner
+ test.file="${test.hdfs.all.tests.file}"
+ suite.type="hdfs"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
<target name="run-commit-test" depends="compile-hdfs-test" description="Run approximate 10-minute set of unit tests prior to commiting">
- <macro-test-runner test.file="${test.hdfs.commit.tests.file}" suite.type="hdfs"/>
+ <macro-test-runner
+ test.file="${test.hdfs.commit.tests.file}"
+ suite.type="hdfs"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
<target name="run-test-unit" depends="compile-hdfs-test" description="Run unit tests">
- <macro-test-runner test.file="${test.hdfs.all.tests.file}" suite.type="unit"/>
+ <macro-test-runner
+ test.file="${test.hdfs.all.tests.file}"
+ suite.type="unit"
+ classpath="${test.classpath.id}"
+ test.dir="${test.build.dir}"
+ fileset.dir="${test.src.dir}"/>
</target>
<target name="checkfailure" if="tests.failed">
@@ -597,7 +643,7 @@
<property name="clover.jar" value="${clover.jar}"/>
<fileset file="${contrib.dir}/build.xml"/>
</subant>
- </target>
+ </target>
<target name="test-core" description="Run hdfs unit tests">
<delete file="${test.build.dir}/testsfailed"/>
@@ -978,6 +1024,42 @@
</target>
+ <target name="binary-system" depends="bin-package, jar-system, jar-test-system"
+ description="make system test package for deployment">
+ <copy todir="${system-test-build-dir}/${final.name}">
+ <fileset dir="${dist.dir}">
+ </fileset>
+ </copy>
+ <copy todir="${system-test-build-dir}/${final.name}/conf">
+ <fileset dir="${test.src.dir}/system/conf/"/>
+ </copy>
+ <copy tofile="${system-test-build-dir}/${final.name}/lib/hadoop-common-${version}.jar"
+ file="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${version}.jar"
+ overwrite="true"/>
+ <copy tofile="${system-test-build-dir}/${final.name}/${final.name}.jar"
+ file="${system-test-build-dir}/${instrumented.final.name}.jar" overwrite="true"/>
+ <copy tofile="${system-test-build-dir}/${final.name}/${final.name}-sources.jar"
+ file="${system-test-build-dir}/${instrumented.final.name}-sources.jar" overwrite="true"/>
+ <copy todir="${system-test-build-dir}/${final.name}"
+ file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar"/>
+ <copy todir="${system-test-build-dir}/${final.name}"
+ file="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar"/>
+ <macro_tar
+ param.destfile="${system-test-build-dir}/${final.name}-bin.tar.gz">
+ <param.listofitems>
+ <tarfileset dir="${system-test-build-dir}" mode="664">
+ <exclude name="${final.name}/bin/*" />
+ <exclude name="${final.name}/src/**" />
+ <exclude name="${final.name}/docs/**" />
+ <include name="${final.name}/**" />
+ </tarfileset>
+ <tarfileset dir="${build.dir}" mode="755">
+ <include name="${final.name}/bin/*" />
+ </tarfileset>
+ </param.listofitems>
+ </macro_tar>
+ </target>
+
<target name="binary" depends="bin-package" description="Make tarball without source and documentation">
<macro_tar param.destfile="${build.dir}/${final.name}-bin.tar.gz">
<param.listofitems>
@@ -1015,12 +1097,13 @@
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
- <target name="clean" depends="clean-contrib" description="Clean. Delete the build files, and their directories">
+ <target name="clean" depends="clean-contrib, clean-fi" description="Clean. Delete the build files, and their directories">
<delete dir="${build.dir}"/>
<delete dir="${build-fi.dir}"/>
<delete dir="${docs.src}/build"/>
<delete file="${hadoop-hdfs.pom}"/>
<delete file="${hadoop-hdfs-test.pom}"/>
+ <delete file="${hadoop-hdfs-instrumented.pom}"/>
</target>
<target name="veryclean" depends="clean-cache,clean"
@@ -1032,6 +1115,8 @@
<target name="clean-cache" depends="clean" description="Clean. Delete ivy cache">
<delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs"/>
<delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-test"/>
+ <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-${herriot.suffix}"/>
+ <delete dir="${user.home}/.ivy2/cache/org.apache.hadoop/hadoop-hdfs-${herriot.suffix}-test"/>
</target>
<!-- ================================================================== -->
@@ -1282,7 +1367,8 @@
</artifact:install>
</target>
- <target name="mvn-install" depends="mvn-taskdef,jar,jar-hdfs-test,set-version">
+ <target name="mvn-install" depends="mvn-taskdef,jar,jar-hdfs-test,set-version,
+ -mvn-system-install">
<artifact:pom file="${hadoop-hdfs.pom}" id="hadoop.hdfs"/>
<artifact:pom file="${hadoop-hdfs-test.pom}" id="hadoop.hdfs.test"/>
<artifact:install file="${hadoop-hdfs.jar}">
@@ -1295,7 +1381,8 @@
</artifact:install>
</target>
- <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-hdfs-test, set-version">
+ <target name="mvn-deploy" depends="mvn-taskdef, jar, jar-hdfs-test, set-version,
+ -mvn-system-deploy">
<property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
<artifact:pom file="${hadoop-hdfs.pom}" id="hadoop.hdfs"/>
<artifact:pom file="${hadoop-hdfs-test.pom}" id="hadoop.hdfs.test"/>
@@ -1316,20 +1403,22 @@
<target name="set-version">
<delete file="${basedir}/ivy/hadoop-hdfs.xml"/>
<delete file="${basedir}/ivy/hadoop-hdfs-test.xml"/>
+ <delete file="${hadoop-hdfs-instrumented.pom}"/>
+ <delete file="${hadoop-hdfs-instrumented-test.pom}"/>
<copy file="${basedir}/ivy/hadoop-hdfs-template.xml" tofile="${basedir}/ivy/hadoop-hdfs.xml"/>
<copy file="${basedir}/ivy/hadoop-hdfs-test-template.xml" tofile="${basedir}/ivy/hadoop-hdfs-test.xml"/>
+ <copy file="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-template.xml"
+ tofile="${hadoop-hdfs-instrumented.pom}"/>
+ <copy file="${basedir}/ivy/hadoop-hdfs-${herriot.suffix}-test-template.xml"
+ tofile="${hadoop-hdfs-instrumented-test.pom}"/>
<replaceregexp byline="true">
<regexp pattern="@version"/>
<substitution expression="${version}"/>
<fileset dir="${basedir}/ivy">
<include name="hadoop-hdfs.xml"/>
- </fileset>
- </replaceregexp>
- <replaceregexp byline="true">
- <regexp pattern="@version"/>
- <substitution expression="${version}"/>
- <fileset dir="${basedir}/ivy">
<include name="hadoop-hdfs-test.xml"/>
+ <include name="hadoop-hdfs-${herriot.suffix}.xml"/>
+ <include name="hadoop-hdfs-${herriot.suffix}-test.xml"/>
</fileset>
</replaceregexp>
</target>
@@ -1403,6 +1492,11 @@
log="${ivyresolvelog}"/>
</target>
+ <target name="ivy-resolve-system" depends="ivy-init">
+ <ivy:resolve settingsRef="${ant.project.name}.ivy.settings" conf="system"
+ log="${ivyresolvelog}"/>
+ </target>
+
<target name="ivy-retrieve" depends="ivy-resolve"
description="Retrieve Ivy-managed artifacts">
<ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
@@ -1458,6 +1552,14 @@
<ivy:cachepath pathid="releaseaudit-classpath" conf="releaseaudit"/>
</target>
+ <target name="ivy-retrieve-system" depends="ivy-resolve-system"
+ description="Retrieve Ivy-managed artifacts for the system tests">
+ <ivy:retrieve settingsRef="${ant.project.name}.ivy.settings"
+ pattern="${build.ivy.lib.dir}/${ivy.artifact.retrieve.pattern}"
+ log="${ivyretrievelog}"/>
+ <ivy:cachepath pathid="ivy-test.classpath" conf="system"/>
+ </target>
+
<target name="ivy-report" depends="ivy-resolve-releaseaudit"
description="Generate">
<ivy:report todir="${build.ivy.report.dir}" settingsRef="${ant.project.name}.ivy.settings"/>
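A hedged usage sketch for the new binary-system packaging target above (it assumes only the default Ant setup; no properties beyond those already defined in this patch):

  % ant binary-system

This runs bin-package, jar-system and jar-test-system, then assembles ${final.name}-bin.tar.gz under build-fi/system, bundling the instrumented jars and the configuration from src/test/system/conf.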
Modified: hadoop/hdfs/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/ivy.xml?rev=950323&r1=950322&r2=950323&view=diff
==============================================================================
--- hadoop/hdfs/trunk/ivy.xml (original)
+++ hadoop/hdfs/trunk/ivy.xml Wed Jun 2 00:52:28 2010
@@ -39,6 +39,7 @@
<conf name="common" visibility="private" extends="compile,runtime" description="common artifacts"/>
<conf name="javadoc" visibility="private" description="artiracts required while performing doc generation" extends="common"/>
<conf name="test" extends="common" visibility="private" description="the classpath needed to run tests"/>
+ <conf name="system" extends="test" visibility="private" description="the classpath needed to run system tests"/>
<conf name="test-hdfswithmr" extends="test, common" visibility="private" description="the classpath needed to run tests"/>
@@ -56,6 +57,7 @@
<dependencies>
<dependency org="org.apache.hadoop" name="hadoop-common" rev="${hadoop-common.version}" conf="common->default"/>
+ <dependency org="org.apache.hadoop" name="hadoop-common-instrumented" rev="${hadoop-common.version}" conf="system->default"/>
<dependency org="commons-logging" name="commons-logging" rev="${commons-logging.version}" conf="common->master"/>
<dependency org="log4j" name="log4j" rev="${log4j.version}" conf="common->master"/>
<dependency org="org.apache.hadoop" name="avro" rev="${avro.version}" conf="common->default"/>
Added: hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml (added)
+++ hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-template.xml Wed Jun 2 00:52:28 2010
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+ Apache Maven 2 POM generated by Apache Ivy
+ http://ant.apache.org/ivy/
+ Apache Ivy version: 2.0.0-rc2 20081028224207
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs-instrumented</artifactId>
+ <packaging>jar</packaging>
+ <version>@version</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-common-instrumented</artifactId>
+ <version>0.22.0-SNAPSHOT</version>
+ </dependency>
+ </dependencies>
+</project>
Added: hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml (added)
+++ hadoop/hdfs/trunk/ivy/hadoop-hdfs-instrumented-test-template.xml Wed Jun 2 00:52:28 2010
@@ -0,0 +1,39 @@
+<?xml version="1.0" encoding="UTF-8"?>
+ <!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!--
+ Apache Maven 2 POM generated by Apache Ivy
+ http://ant.apache.org/ivy/
+ Apache Ivy version: 2.0.0-rc2 20081028224207
+-->
+<project xmlns="http://maven.apache.org/POM/4.0.0" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance"
+ xsi:schemaLocation="http://maven.apache.org/POM/4.0.0 http://maven.apache.org/maven-v4_0_0.xsd">
+
+ <modelVersion>4.0.0</modelVersion>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs-instrumented-test</artifactId>
+ <packaging>jar</packaging>
+ <version>@version</version>
+ <dependencies>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-hdfs-instrumented</artifactId>
+ <version>@version</version>
+ </dependency>
+ </dependencies>
+</project>
Modified: hadoop/hdfs/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/ivy/libraries.properties?rev=950323&r1=950322&r2=950323&view=diff
==============================================================================
--- hadoop/hdfs/trunk/ivy/libraries.properties (original)
+++ hadoop/hdfs/trunk/ivy/libraries.properties Wed Jun 2 00:52:28 2010
@@ -75,6 +75,7 @@ slf4j-log4j12.version=1.4.3
xmlenc.version=0.52
xerces.version=1.4.4
+#This property has to be updated synchronously with aop.xml
aspectj.version=1.6.5
mockito-all.version=1.8.2
Modified: hadoop/hdfs/trunk/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/aop/build/aop.xml?rev=950323&r1=950322&r2=950323&view=diff
==============================================================================
--- hadoop/hdfs/trunk/src/test/aop/build/aop.xml (original)
+++ hadoop/hdfs/trunk/src/test/aop/build/aop.xml Wed Jun 2 00:52:28 2010
@@ -14,13 +14,42 @@
See the License for the specific language governing permissions and
limitations under the License.
-->
-<project name="aspects">
+<project name="aspects"
+ xmlns:artifact="urn:maven-artifact-ant">
+ <!-- The following are duplications and have to be customized elsewhere too -->
+ <!-- TODO this version has to be updated synchronously with Ivy -->
+ <property name="aspectversion" value="1.6.5"/>
+ <!-- TODO this has to be changed synchronously with build.xml version prop.-->
+ <!-- this works around test-patch setting its own 'version' -->
+ <property name="project.version" value="0.22.0-SNAPSHOT"/>
+
+ <!-- Properties common for all fault injections -->
<property name="build-fi.dir" value="${basedir}/build-fi"/>
<property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
<property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
- <property name="aspectversion" value="1.6.5"/>
<property file="${basedir}/build.properties"/>
+ <!-- Properties related to system fault injection and tests -->
+ <property name="system-test-build-dir" value="${build-fi.dir}/system"/>
+ <!-- This variable is set by the respective injection targets -->
+ <property name="hadoop.instrumented.jar" value=""/>
+
+ <!-- Properties specifically for system fault-injections and system tests -->
+ <property name="herriot.suffix" value="instrumented"/>
+ <property name="instrumented.final.name"
+ value="${name}-${herriot.suffix}-${version}"/>
+ <property name="hadoop-hdfs-instrumented.pom"
+ location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}.xml" />
+ <property name="hadoop-hdfs-instrumented-test.pom"
+ location="${ivy.dir}/hadoop-hdfs-${herriot.suffix}-test.xml" />
+ <property name="hadoop-hdfs-instrumented.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}.jar" />
+ <property name="hadoop-hdfs-instrumented-sources.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-${version}-sources.jar" />
+ <property name="hadoop-hdfs-instrumented-test.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}.jar" />
+ <property name="hadoop-hdfs-instrumented-test-sources.jar"
+ location="${system-test-build-dir}/${name}-${herriot.suffix}-test-${version}-sources.jar" />
<!--All Fault Injection (FI) related targets are located in this section -->
<target name="clean-fi">
@@ -39,21 +68,27 @@
<taskdef
resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
<classpath>
- <pathelement
+ <pathelement
location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
</classpath>
</taskdef>
<echo message="Start weaving aspects in place"/>
+ <path id="aspect.path">
+ <pathelement location="${hadoop.instrumented.jar}"/>
+ </path>
<iajc
encoding="${build.encoding}"
- srcdir="${java.src.dir};${build.src};${test.src.dir}/aop"
+ srcdir="${java.src.dir};${build.src};${src.dir.path}"
includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
- excludes="org/apache/hadoop/record/**/*"
- destDir="${build.classes}"
+ excludes="org/apache/hadoop/classification/tools/**/*, org/apache/hadoop/record/**/*"
+ destDir="${dest.dir}"
debug="${javac.debug}"
target="${javac.version}"
source="${javac.version}"
- deprecation="${javac.deprecation}">
+ deprecation="${javac.deprecation}"
+ fork="true"
+ maxmem="256m">
+ <aspectpath refid="aspect.path"/>
<classpath refid="test.classpath"/>
</iajc>
<loadfile property="injection.failure" srcfile="${compile-inject.output}">
@@ -69,15 +104,122 @@
<echo message="Weaving of aspects is finished"/>
</target>
+ <!-- Classpath for running system tests -->
+ <path id="test.system.classpath">
+ <pathelement location="${hadoop.conf.dir.deployed}" />
+ <pathelement location="${system-test-build-dir}/test/extraconf" />
+ <pathelement location="${system-test-build-dir}/test/classes" />
+ <pathelement location="${system-test-build-dir}/classes" />
+ <pathelement location="${test.src.dir}" />
+ <pathelement location="${build-fi.dir}" />
+ <pathelement location="${build-fi.dir}/tools" />
+ <pathelement path="${clover.jar}" />
+ <fileset dir="${system-test-build-dir}">
+ <include name="**/*.jar" />
+ <exclude name="**/excluded/" />
+ </fileset>
+ <path refid="classpath" />
+ </path>
+
+ <!-- ================ -->
+ <!-- run system tests -->
+ <!-- ================ -->
+ <target name="test-system" depends="ivy-retrieve-common, ivy-retrieve-system"
+ description="Run system tests">
+ <subant buildpath="build.xml" target="jar-test-system"/>
+ <macro-test-runner test.file="${test.hdfs.all.tests.file}"
+ suite.type="system/test"
+ classpath="test.system.classpath"
+ test.dir="${system-test-build-dir}/test"
+ fileset.dir="${test.src.dir}"
+ hadoop.conf.dir.deployed="${hadoop.conf.dir.deployed}">
+ </macro-test-runner>
+ </target>
+
<target name="injectfaults"
description="Instrument classes with faults and other AOP advices">
<!--mkdir to prevent <subant> failure in case the folder has been removed-->
<mkdir dir="${build-fi.dir}"/>
<delete file="${compile-inject.output}"/>
- <subant buildpath="${basedir}" target="compile-fault-inject"
- output="${compile-inject.output}">
+ <weave-injectfault-aspects dest.dir="${build-fi.dir}/classes"
+ src.dir="${test.src.dir}/aop"
+ aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/test/hadoop-common-${project.version}.jar">
+ </weave-injectfault-aspects>
+ </target>
+
+ <!-- =============================================================== -->
+ <!-- Create hadoop-{version}-dev-core.jar required to be deployed on -->
+ <!-- cluster for system tests -->
+ <!-- =============================================================== -->
+ <target name="jar-system"
+ depends="inject-system-faults"
+ description="Make hadoop-hdfs-instrumented.jar with system injections.">
+ <macro-jar-fault-inject target.name="jar"
+ build.dir="${system-test-build-dir}"
+ jar.final.name="final.name"
+ jar.final.value="${instrumented.final.name}">
+ </macro-jar-fault-inject>
+ <jar jarfile="${system-test-build-dir}/${instrumented.final.name}-sources.jar"
+ update="yes">
+ <fileset dir="${test.src.dir}/system/java" includes="org/apache/hadoop/**/*.java" />
+ <fileset dir="${test.src.dir}/system/aop" includes="org/apache/hadoop/**/*.aj" />
+ </jar>
+ </target>
+
+ <target name="jar-test-system" depends="inject-system-faults, compile-test-system"
+ description="Make hadoop-hdfs-instrumented-test.jar with system injections.">
+ <subant buildpath="build.xml" target="-do-jar-test">
+ <property name="build.dir" value="${system-test-build-dir}"/>
+ <property name="test.hdfs.final.name" value="${name}-${herriot.suffix}-test-${version}"/>
+ <property name="test.build.classes"
+ value="${system-test-build-dir}/test/classes"/>
+ </subant>
+ <jar jarfile="${hadoop-hdfs-instrumented-test-sources.jar}">
+ <fileset dir="${test.src.dir}/system/test" includes="org/apache/hadoop/**/*.java" />
+ </jar>
+ </target>
+
+ <target name="compile-test-system" description="Compiles system tests">
+ <subant buildpath="build.xml" target="-compile-test-system.wrapper">
+ <property name="build.dir" value="${system-test-build-dir}"/>
+ </subant>
+ </target>
+
+ <target name="-compile-test-system.wrapper" depends="ivy-retrieve-common, ivy-retrieve-system">
+ <macro-compile-hdfs-test
+ target.dir="${system-test-build-dir}/test/classes"
+ source.dir="${test.src.dir}/system/test"
+ dest.dir="${system-test-build-dir}/test/classes"
+ classpath="test.classpath"/>
+ </target>
+
+ <macrodef name="weave-injectfault-aspects">
+ <attribute name="dest.dir" />
+ <attribute name="src.dir" />
+ <attribute name="aspects.jar"/>
+ <sequential>
+ <subant buildpath="build.xml" target="compile-fault-inject"
+ output="${compile-inject.output}">
+ <property name="build.dir" value="${build-fi.dir}" />
+ <property name="src.dir.path" value="@{src.dir}" />
+ <property name="dest.dir" value="@{dest.dir}" />
+ <property name="hadoop.instrumented.jar" value="@{aspects.jar}"/>
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <target name="inject-system-faults"
+ description="Inject system faults">
+ <property name="build-fi.dir" value="${system-test-build-dir}" />
+ <mkdir dir="${build-fi.dir}"/>
+ <delete file="${compile-inject.output}"/>
+ <subant buildpath="build.xml" target="ivy-retrieve-system">
<property name="build.dir" value="${build-fi.dir}"/>
</subant>
+ <weave-injectfault-aspects dest.dir="${system-test-build-dir}/classes"
+ src.dir="${test.src.dir}/system/java;${test.src.dir}/system/aop"
+ aspects.jar="${build-fi.dir}/ivy/lib/${ant.project.name}/system/hadoop-common-${herriot.suffix}-${project.version}.jar">
+ </weave-injectfault-aspects>
</target>
<macrodef name="macro-run-tests-fault-inject">
@@ -99,11 +241,12 @@
<!-- ================================================================== -->
<macrodef name="macro-jar-fault-inject">
<attribute name="target.name" />
+ <attribute name="build.dir" />
<attribute name="jar.final.name" />
<attribute name="jar.final.value" />
<sequential>
<subant buildpath="build.xml" target="@{target.name}">
- <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="build.dir" value="@{build.dir}"/>
<property name="@{jar.final.name}" value="@{jar.final.value}"/>
<property name="jar.extra.properties.list"
value="${test.src.dir}/fi-site.xml" />
@@ -129,4 +272,78 @@
</macrodef>
<!--End of Fault Injection (FI) related section-->
+
+ <!-- Start of cluster controller binary target -->
+ <property name="runAs.src"
+ value ="${test.src.dir}/system/c++/runAs"/>
+ <property name="runAs.build.dir"
+ value="${system-test-build-dir}/c++-build"/>
+ <property name="runAs.configure.script"
+ value="${runAs.build.dir}/configure"/>
+ <target name="init-runAs-build">
+ <condition property="runAs.parameters.passed">
+ <not>
+ <equals arg1="${run-as.hadoop.home.dir}"
+ arg2="$${run-as.hadoop.home.dir}"/>
+ </not>
+ </condition>
+ <fail unless="runAs.parameters.passed"
+ message="Required parameters run-as.hadoop.home.dir not passed to the build"/>
+ <mkdir dir="${runAs.build.dir}"/>
+ <copy todir="${runAs.build.dir}" overwrite="true">
+ <fileset dir="${runAs.src}" includes="**/*"/>
+ </copy>
+ <chmod perm="+x" file="${runAs.configure.script}">
+ </chmod>
+ </target>
+
+ <target name="configure-runAs"
+ depends="init-runAs-build">
+ <exec executable="${runAs.configure.script}"
+ dir="${runAs.build.dir}" failonerror="true">
+ <arg value="--with-home=${run-as.hadoop.home.dir}"/>
+ </exec>
+ </target>
+ <target name="run-as" depends="configure-runAs">
+ <exec executable="${make.cmd}" dir="${runAs.build.dir}"
+ searchpath="yes" failonerror="yes">
+ <arg value="all" />
+ </exec>
+ </target>
+ <!-- End of cluster controller binary target -->
+ <!-- Install Herriot artifacts to the local Maven -->
+ <target name="-mvn-system-install" depends="mvn-taskdef, jar-system, jar-test-system">
+ <artifact:pom file="${hadoop-hdfs-instrumented.pom}"
+ id="hadoop.hdfs.${herriot.suffix}"/>
+ <artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"
+ id="hadoop.hdfs.${herriot.suffix}.test"/>
+ <artifact:install file="${hadoop-hdfs-instrumented.jar}">
+ <pom refid="hadoop.hdfs.${herriot.suffix}"/>
+ <attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
+ </artifact:install>
+ <artifact:install file="${hadoop-hdfs-instrumented-test.jar}">
+ <pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
+ <attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
+ </artifact:install>
+ </target>
+ <target name="-mvn-system-deploy" depends="mvn-taskdef, jar-system, jar-test-system">
+ <property name="repourl" value="https://repository.apache.org/content/repositories/snapshots" />
+ <artifact:pom file="${hadoop-hdfs-instrumented.pom}"
+ id="hadoop.hdfs.${herriot.suffix}"/>
+ <!--<artifact:pom file="${hadoop-hdfs-instrumented-test.pom}"-->
+ <!--id="hadoop.hdfs.system.test.test"/>-->
+
+ <artifact:install-provider artifactId="wagon-http" version="1.0-beta-2"/>
+ <artifact:deploy file="${hadoop-hdfs-instrumented.jar}">
+ <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+ <pom refid="hadoop.hdfs.${herriot.suffix}"/>
+ <attach file="${hadoop-hdfs-instrumented-sources.jar}" classifier="sources" />
+ </artifact:deploy>
+ <artifact:deploy file="${hadoop-hdfs-instrumented-test.jar}">
+ <remoteRepository id="apache.snapshots.https" url="${repourl}"/>
+ <pom refid="hadoop.hdfs.${herriot.suffix}.test"/>
+ <attach file="${hadoop-hdfs-instrumented-test-sources.jar}" classifier="sources" />
+ </artifact:deploy>
+ </target>
+ <!-- End of Maven -->
</project>
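A hedged sketch of how the new system-test targets above might be driven (the values in parentheses are placeholders, not part of this commit):

  % ant test-system -Dhadoop.conf.dir.deployed=(deployed Hadoop conf dir)
  % ant run-as -Drun-as.hadoop.home.dir=(HADOOP_HOME of the deployed cluster)

test-system retrieves the Herriot dependencies via ivy-retrieve-system, builds the instrumented test jar through jar-test-system, and runs the suites under src/test/system/test with macro-test-runner; run-as configures and builds the runAs cluster-controller binary described in system-test-hdfs.xml.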
Added: hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj (added)
+++ hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/HDFSPolicyProviderAspect.aj Wed Jun 2 00:52:28 2010
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs;
+
+import java.util.ArrayList;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.test.system.DaemonProtocol;
+import org.apache.hadoop.hdfs.test.system.DNProtocol;
+import org.apache.hadoop.hdfs.test.system.NNProtocol;
+import org.apache.hadoop.security.authorize.Service;
+import org.apache.hadoop.security.authorize.ServiceAuthorizationManager;
+
+/**
+ * This aspect adds two HDFS Herriot-specific protocols to the list of 'authorized'
+ * Herriot protocols.
+ * Protocol descriptors, i.e. 'security.nn.protocol.acl', have to be added to
+ * <code>hadoop-policy.xml</code> if present.
+ */
+public privileged aspect HDFSPolicyProviderAspect {
+ private static final Log LOG = LogFactory
+ .getLog(HDFSPolicyProviderAspect.class);
+
+ ArrayList<Service> herriotHDFSServices = null;
+
+ pointcut updateHDFSServices() :
+ execution (public Service[] HDFSPolicyProvider.getServices());
+
+ Service[] around() : updateHDFSServices () {
+ herriotHDFSServices = new ArrayList<Service>();
+ for (Service s : HDFSPolicyProvider.hdfsServices) {
+ LOG.debug("Copying configured protocol to "
+ + s.getProtocol().getCanonicalName());
+ herriotHDFSServices.add(s);
+ }
+ herriotHDFSServices.add(new Service("security.daemon.protocol.acl",
+ DaemonProtocol.class));
+ herriotHDFSServices.add(new Service("security.nn.protocol.acl",
+ NNProtocol.class));
+ herriotHDFSServices.add(new Service("security.dn.protocol.acl",
+ DNProtocol.class));
+ final Service[] retArray = herriotHDFSServices
+ .toArray(new Service[herriotHDFSServices.size()]);
+ LOG.debug("Number of configured protocols to return: " + retArray.length);
+ return retArray;
+ }
+}
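For reference, the ACL keys registered by the aspect above would appear in hadoop-policy.xml along these lines (a hedged sketch; the wildcard values are illustrative only and not part of this commit):

  <property>
    <name>security.daemon.protocol.acl</name>
    <value>*</value>
  </property>
  <property>
    <name>security.nn.protocol.acl</name>
    <value>*</value>
  </property>
  <property>
    <name>security.dn.protocol.acl</name>
    <value>*</value>
  </property>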
Added: hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj (added)
+++ hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/datanode/DataNodeAspect.aj Wed Jun 2 00:52:28 2010
@@ -0,0 +1,67 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.datanode;
+
+import java.io.File;
+import java.io.IOException;
+import java.util.AbstractList;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.test.system.DNProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+public privileged aspect DataNodeAspect {
+ declare parents : DataNode implements DNProtocol;
+
+ public Configuration DataNode.getDaemonConf() {
+ return super.getConf();
+ }
+
+ pointcut dnConstructorPointcut(Configuration conf, AbstractList<File> dirs) :
+ call(DataNode.new(Configuration, AbstractList<File>))
+ && args(conf, dirs);
+
+ after(Configuration conf, AbstractList<File> dirs) returning (DataNode datanode):
+ dnConstructorPointcut(conf, dirs) {
+ try {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ datanode.setUser(ugi.getShortUserName());
+ } catch (IOException e) {
+ datanode.LOG.warn("Unable to get the user information for the " +
+ "Jobtracker");
+ }
+ datanode.setReady(true);
+ }
+
+ pointcut getVersionAspect(String protocol, long clientVersion) :
+ execution(public long DataNode.getProtocolVersion(String ,
+ long) throws IOException) && args(protocol, clientVersion);
+
+ long around(String protocol, long clientVersion) :
+ getVersionAspect(protocol, clientVersion) {
+ if(protocol.equals(DaemonProtocol.class.getName())) {
+ return DaemonProtocol.versionID;
+ } else if(protocol.equals(DNProtocol.class.getName())) {
+ return DNProtocol.versionID;
+ } else {
+ return proceed(protocol, clientVersion);
+ }
+ }
+}
\ No newline at end of file
Added: hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj (added)
+++ hadoop/hdfs/trunk/src/test/system/aop/org/apache/hadoop/hdfs/server/namenode/NameNodeAspect.aj Wed Jun 2 00:52:28 2010
@@ -0,0 +1,77 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.server.namenode;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.test.system.NNProtocol;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+public privileged aspect NameNodeAspect {
+ declare parents : NameNode implements NNProtocol;
+
+ // NameNode doesn't store a copy of its configuration
+ // because it can be changed through the life cycle of the object.
+ // So, an exposed reference needs to be added and updated after
+ // new NameNode(Configuration conf) is complete
+ Configuration NameNode.configRef = null;
+
+ // This method simply assigns a reference to the NameNode configuration object
+ void NameNode.setRef (Configuration conf) {
+ if (configRef == null)
+ configRef = conf;
+ }
+
+ public Configuration NameNode.getDaemonConf() {
+ return configRef;
+ }
+
+ pointcut nnConstructorPointcut(Configuration conf) :
+ call(NameNode.new(Configuration)) && args(conf);
+
+ after(Configuration conf) returning (NameNode namenode):
+ nnConstructorPointcut(conf) {
+ try {
+ UserGroupInformation ugi = UserGroupInformation.getCurrentUser();
+ namenode.setUser(ugi.getShortUserName());
+ } catch (IOException e) {
+ namenode.LOG.warn("Unable to get the user information for the " +
+ "Jobtracker");
+ }
+ namenode.setRef(conf);
+ namenode.setReady(true);
+ }
+
+ pointcut getVersionAspect(String protocol, long clientVersion) :
+ execution(public long NameNode.getProtocolVersion(String ,
+ long) throws IOException) && args(protocol, clientVersion);
+
+ long around(String protocol, long clientVersion) :
+ getVersionAspect(protocol, clientVersion) {
+ if(protocol.equals(DaemonProtocol.class.getName())) {
+ return DaemonProtocol.versionID;
+ } else if(protocol.equals(NNProtocol.class.getName())) {
+ return NNProtocol.versionID;
+ } else {
+ return proceed(protocol, clientVersion);
+ }
+ }
+}
\ No newline at end of file
Added: hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml (added)
+++ hadoop/hdfs/trunk/src/test/system/conf/system-test-hdfs.xml Wed Jun 2 00:52:28 2010
@@ -0,0 +1,116 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+
+<!-- Put site-specific property overrides in this file. -->
+
+<configuration>
+<!-- Mandatory properties that are to be set and uncommented before running the tests -->
+
+<property>
+ <name>test.system.hdrc.hadoophome</name>
+ <value>$(TO_DO_HADOOP_INSTALL)/share/hadoop-current</value>
+ <description> This is the path to the home directory of the hadoop deployment.
+ </description>
+</property>
+<property>
+ <name>test.system.hdrc.hadoopconfdir</name>
+ <value>$(TO_DO_HADOOP_INSTALL)/conf/hadoop</value>
+ <description> This is the path to the configuration directory of the hadoop
+ cluster that is deployed.
+ </description>
+</property>
+
+<property>
+ <name>test.system.hdrc.dn.hostfile</name>
+ <value>slaves.localcopy.txt</value>
+ <description> File name containing the hostnames where the DataNodes are running.
+ </description>
+</property>
+
+<property>
+ <name>test.system.hdfs.clusterprocess.impl.class</name>
+ <value>org.apache.hadoop.hdfs.test.system.HDFSCluster$HDFSProcessManager</value>
+ <description>
+ Cluster process manager for the HDFS subsystem of the cluster. The value
+ org.apache.hadoop.hdfs.test.system.HDFSCluster$MultiUserHDFSProcessManager can
+ be used to enable multi-user support.
+ </description>
+</property>
+
+<property>
+ <name>test.system.hdrc.deployed.scripts.dir</name>
+ <value>./src/test/system/scripts</value>
+ <description>
+ This directory hosts the scripts in the deployed location where
+ the system test client runs.
+ </description>
+</property>
+
+<property>
+ <name>test.system.hdrc.hadoopnewconfdir</name>
+ <value>$(TO_DO_GLOBAL_TMP_DIR)/newconf</value>
+ <description>
+ The directory to which the new configuration files will be copied on all
+ the clusters.
+ </description>
+</property>
+
+<!-- Mandatory keys to be set for the multi user support to be enabled. -->
+
+<property>
+ <name>test.system.hdfs.clusterprocess.impl.class</name>
+ <value>org.apache.hadoop.hdfs.test.system.HDFSCluster$MultiUserHDFSProcessManager</value>
+ <description>
+ Enables the multi-user based cluster process manager.
+ </description>
+</property>
+<property>
+ <name>test.system.hdrc.multi-user.binary.path</name>
+ <value>$(TO_DO_HADOOP_INSTALL)/conf/hadoop/runAs</value>
+ <description>
+ Local file system path on the gateway to the cluster-controller binary, including the binary name.
+ To build the binary, the following commands need to be executed:
+ % ant run-as -Drun-as.hadoop.home.dir=(HADOOP_HOME of setup cluster)
+ % cp build-fi/system/c++-build/runAs test.system.hdrc.multi-user.binary.path
+ The location of the binary is an important security precaution.
+ The binary should be owned by root, and the test user group permissions should be set in such a
+ way that it can be executed by the test user. Example usage would be:
+ % sudo chown root binary
+ % sudo chmod 6511 binary
+ Change permission appropriately to make it more secure.
+ </description>
+</property>
+<property>
+ <name>test.system.hdrc.multi-user.managinguser.namenode</name>
+ <value>*</value>
+ <description>
+ User value for managing the particular daemon. Please note that this user should be
+ present on the gateways also. An example configuration for the above would be
+ key name = test.system.hdrc.multi-user.managinguser.namenode
+ key value = guest
+ Please note the daemon names are all lower case, corresponding to the hadoop-daemon.sh command.
+ </description>
+</property>
+<property>
+ <name>test.system.hdrc.multi-user.managinguser.datanode</name>
+ <value>*</value>
+</property>
+
+</configuration>
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNClient.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,82 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.test.system.process.RemoteProcess;
+
+/**
+ * Datanode client for system tests. The class assumes that the configuration key
+ * {@code DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY} is set; only the port portion of
+ * the address is used.
+ */
+public class DNClient extends HDFSDaemonClient<DNProtocol> {
+
+ DNProtocol proxy;
+
+ public DNClient(Configuration conf, RemoteProcess process) throws IOException {
+ super(conf, process);
+ }
+
+ @Override
+ public void connect() throws IOException {
+ if (isConnected()) {
+ return;
+ }
+ String sockAddrStr = getConf().get(DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY);
+ if (sockAddrStr == null) {
+ throw new IllegalArgumentException("Datenode IPC address is not set."
+ + "Check if " + DFSConfigKeys.DFS_DATANODE_IPC_ADDRESS_KEY
+ + " is configured.");
+ }
+ String[] splits = sockAddrStr.split(":");
+ if (splits.length != 2) {
+ throw new IllegalArgumentException(
+ "Datanode IPC address is not correctly configured");
+ }
+ String port = splits[1];
+ String sockAddr = getHostName() + ":" + port;
+ InetSocketAddress bindAddr = NetUtils.createSocketAddr(sockAddr);
+ proxy = (DNProtocol) RPC.getProxy(DNProtocol.class, DNProtocol.versionID,
+ bindAddr, getConf());
+ setConnected(true);
+ }
+
+ @Override
+ public void disconnect() throws IOException {
+ RPC.stopProxy(proxy);
+ setConnected(false);
+ }
+
+ @Override
+ protected DNProtocol getProxy() {
+ return proxy;
+ }
+
+ public Configuration getDatanodeConfig() throws IOException {
+ return getProxy().getDaemonConf();
+ }
+}
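A minimal usage sketch for the client above (hypothetical; conf and process are assumed to come from the surrounding Herriot cluster setup, e.g. an HDFSCluster instance):

  // Hypothetical test snippet, not part of this commit.
  DNClient dnClient = new DNClient(conf, process);
  dnClient.connect();                 // builds the RPC proxy from the DN IPC port
  Configuration dnConf = dnClient.getDatanodeConfig();
  dnClient.disconnect();              // stops the proxy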
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/DNProtocol.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.security.KerberosInfo;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+/**
+ * Client-side API exposed by the Datanode.
+ * Actual implementations are likely to be injected.
+ *
+ * The protocol has to be annotated so that KerberosInfo can be filled in during
+ * creation of an ipc.Client connection.
+ */
+@KerberosInfo(
+ serverPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
+public interface DNProtocol extends DaemonProtocol {
+ public static final long versionID = 1L;
+}
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSCluster.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,149 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Iterator;
+import java.util.List;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.test.system.AbstractDaemonClient;
+import org.apache.hadoop.test.system.AbstractDaemonCluster;
+import org.apache.hadoop.test.system.process.ClusterProcessManager;
+import org.apache.hadoop.test.system.process.HadoopDaemonRemoteCluster;
+import org.apache.hadoop.test.system.process.MultiUserHadoopDaemonRemoteCluster;
+import org.apache.hadoop.test.system.process.RemoteProcess;
+import org.apache.hadoop.test.system.process.HadoopDaemonRemoteCluster.HadoopDaemonInfo;
+
+public class HDFSCluster extends AbstractDaemonCluster {
+
+ static {
+ Configuration.addDefaultResource("hdfs-site.xml");
+ }
+
+ private static final Log LOG = LogFactory.getLog(HDFSCluster.class);
+ public static final String CLUSTER_PROCESS_MGR_IMPL =
+ "test.system.hdfs.clusterprocess.impl.class";
+
+ private HDFSCluster(Configuration conf, ClusterProcessManager rCluster)
+ throws IOException {
+ super(conf, rCluster);
+ }
+
+ /**
+ * Key used to point to the file containing the hostnames of the datanodes.
+ */
+ public static final String CONF_HADOOP_DN_HOSTFILE_NAME =
+ "test.system.hdrc.dn.hostfile";
+
+ private static List<HadoopDaemonInfo> hdfsDaemonInfos;
+
+ private static String nnHostName;
+ private static String DN_hostFileName;
+
+ protected enum Role {NN, DN}
+
+ @Override
+ protected AbstractDaemonClient
+ createClient(RemoteProcess process) throws IOException {
+ Enum<?> pRole = process.getRole();
+ if (Role.NN.equals(pRole)) {
+ return createNNClient(process);
+ } else if (Role.DN.equals(pRole)) {
+ return createDNClient(process);
+ } else {
+ throw new IOException("Role " + pRole +
+ " is not supported by HDFSCluster");
+ }
+ }
+
+ protected DNClient createDNClient(RemoteProcess dnDaemon) throws IOException {
+ return new DNClient(getConf(), dnDaemon);
+ }
+
+ protected NNClient createNNClient(RemoteProcess nnDaemon) throws IOException {
+ return new NNClient(getConf(), nnDaemon);
+ }
+
+ public NNClient getNNClient() {
+ Iterator<AbstractDaemonClient> iter = getDaemons().get(Role.NN).iterator();
+ return (NNClient) iter.next();
+ }
+
+ public List<DNClient> getDNClients() {
+ return (List) getDaemons().get(Role.DN);
+ }
+
+ public DNClient getDNClient(String hostname) {
+ for (DNClient dnC : getDNClients()) {
+ if (dnC.getHostName().equals(hostname))
+ return dnC;
+ }
+ return null;
+ }
+
+ public static class HDFSProcessManager extends HadoopDaemonRemoteCluster {
+ public HDFSProcessManager() {
+ super(hdfsDaemonInfos);
+ }
+ }
+
+ public static class MultiUserHDFSProcessManager
+ extends MultiUserHadoopDaemonRemoteCluster {
+ public MultiUserHDFSProcessManager() {
+ super(hdfsDaemonInfos);
+ }
+ }
+
+ public static HDFSCluster createCluster(Configuration conf) throws Exception {
+ conf.addResource("system-test.xml");
+ String sockAddrStr = FileSystem.getDefaultUri(conf).getAuthority();
+ if (sockAddrStr == null) {
+ throw new IllegalArgumentException("Namenode IPC address is not set");
+ }
+ String[] splits = sockAddrStr.split(":");
+ if (splits.length != 2) {
+ throw new IllegalArgumentException(
+ "Namenode report IPC is not correctly configured");
+ }
+ nnHostName = splits[0];
+ DN_hostFileName = conf.get(CONF_HADOOP_DN_HOSTFILE_NAME, "slaves");
+
+ hdfsDaemonInfos = new ArrayList<HadoopDaemonInfo>();
+ hdfsDaemonInfos.add(new HadoopDaemonInfo("namenode",
+ Role.NN, Arrays.asList(new String[]{nnHostName})));
+ hdfsDaemonInfos.add(new HadoopDaemonInfo("datanode",
+ Role.DN, DN_hostFileName));
+
+ String implKlass = conf.get(CLUSTER_PROCESS_MGR_IMPL);
+ if (implKlass == null || implKlass.isEmpty()) {
+ implKlass = HDFSCluster.HDFSProcessManager.class.getName();
+ }
+ Class<ClusterProcessManager> klass =
+ (Class<ClusterProcessManager>) Class.forName(implKlass);
+ ClusterProcessManager clusterProcessMgr = klass.newInstance();
+ LOG.info("Created ClusterProcessManager as " + implKlass);
+ clusterProcessMgr.init(conf);
+ return new HDFSCluster(conf, clusterProcessMgr);
+ }
+}
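
A hedged usage sketch of the cluster bootstrap above; it assumes system-test.xml, hdfs-site.xml and the datanode host file are already deployed and on the classpath of the machine running the test:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hdfs.test.system.DNClient;
    import org.apache.hadoop.hdfs.test.system.HDFSCluster;
    import org.apache.hadoop.hdfs.test.system.NNClient;

    public class HDFSClusterSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Optional: pick the multi-user process manager instead of the default
        // HDFSProcessManager via the key introduced above.
        conf.set(HDFSCluster.CLUSTER_PROCESS_MGR_IMPL,
            HDFSCluster.MultiUserHDFSProcessManager.class.getName());
        HDFSCluster cluster = HDFSCluster.createCluster(conf);
        cluster.setUp();
        NNClient nn = cluster.getNNClient();
        System.out.println("Namenode runs on " + nn.getHostName());
        for (DNClient dn : cluster.getDNClients()) {
          System.out.println("Datanode: " + dn.getHostName());
        }
        cluster.tearDown();
      }
    }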
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/HDFSDaemonClient.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.test.system.AbstractDaemonClient;
+import org.apache.hadoop.test.system.DaemonProtocol;
+import org.apache.hadoop.test.system.process.RemoteProcess;
+
+public abstract class HDFSDaemonClient<PROXY extends DaemonProtocol>
+ extends AbstractDaemonClient<PROXY> {
+
+ public HDFSDaemonClient(Configuration conf, RemoteProcess process)
+ throws IOException {
+ super(conf, process);
+ }
+
+ public String[] getHDFSDataDirs() throws IOException {
+ return getProxy().getDaemonConf().getStrings("dfs.data.dir");
+ }
+
+ public String getHDFSNameDirs() throws IOException {
+ return getProxy().getDaemonConf().getStrings("dfs.name.dir")[0];
+ }
+}
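
A brief sketch of how the two accessors above might be used against an already set-up HDFSCluster (the surrounding cluster handling is assumed, as in TestHL040 below):

    import java.io.IOException;
    import org.apache.hadoop.hdfs.test.system.DNClient;
    import org.apache.hadoop.hdfs.test.system.HDFSCluster;
    import org.apache.hadoop.hdfs.test.system.NNClient;

    public class StorageDirsSketch {
      /** Logs the storage directories of every daemon in an already set-up cluster. */
      static void printStorageDirs(HDFSCluster cluster) throws IOException {
        NNClient nn = cluster.getNNClient();
        // getHDFSNameDirs() returns the first entry of "dfs.name.dir" from the remote namenode.
        System.out.println("Namenode image dir: " + nn.getHDFSNameDirs());
        for (DNClient dn : cluster.getDNClients()) {
          // getHDFSDataDirs() reads "dfs.data.dir" from the remote datanode's configuration.
          for (String dir : dn.getHDFSDataDirs()) {
            System.out.println(dn.getHostName() + " stores blocks under " + dir);
          }
        }
      }
    }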
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNClient.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,71 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.test.system.process.RemoteProcess;
+
+public class NNClient extends HDFSDaemonClient<NNProtocol> {
+
+ NNProtocol proxy;
+
+ public NNClient(Configuration conf, RemoteProcess process) throws IOException {
+ super(conf, process);
+ }
+
+ @Override
+ public void connect() throws IOException {
+ if (isConnected()) {
+ return;
+ }
+ String sockAddrStr = FileSystem.getDefaultUri(getConf()).getAuthority();
+ if (sockAddrStr == null) {
+ throw new IllegalArgumentException("Namenode IPC address is not set");
+ }
+ String[] splits = sockAddrStr.split(":");
+ if (splits.length != 2) {
+ throw new IllegalArgumentException(
+ "Namenode report IPC is not correctly configured");
+ }
+ String port = splits[1];
+ String sockAddr = getHostName() + ":" + port;
+
+ InetSocketAddress bindAddr = NetUtils.createSocketAddr(sockAddr);
+ proxy = (NNProtocol) RPC.getProxy(NNProtocol.class, NNProtocol.versionID,
+ bindAddr, getConf());
+ setConnected(true);
+ }
+
+ @Override
+ public void disconnect() throws IOException {
+ RPC.stopProxy(proxy);
+ setConnected(false);
+ }
+
+ @Override
+ protected NNProtocol getProxy() {
+ return proxy;
+ }
+}
Added: hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java (added)
+++ hadoop/hdfs/trunk/src/test/system/java/org/apache/hadoop/hdfs/test/system/NNProtocol.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,36 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs.test.system;
+
+import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.security.KerberosInfo;
+import org.apache.hadoop.test.system.DaemonProtocol;
+
+/**
+ * Client-side API exposed by the Namenode.
+ * Actual implementations are likely to be injected.
+ *
+ * The protocol has to be annotated so that KerberosInfo can be filled in during
+ * creation of an ipc.Client connection.
+ */
+@KerberosInfo(
+ serverPrincipal = DFSConfigKeys.DFS_NAMENODE_USER_NAME_KEY)
+public interface NNProtocol extends DaemonProtocol {
+ public static final long versionID = 1L;
+}
Added: hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java
URL: http://svn.apache.org/viewvc/hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java?rev=950323&view=auto
==============================================================================
--- hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java (added)
+++ hadoop/hdfs/trunk/src/test/system/test/org/apache/hadoop/hdfs/TestHL040.java Wed Jun 2 00:52:28 2010
@@ -0,0 +1,69 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdfs;
+
+import java.io.IOException;
+import java.util.Collection;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.test.system.DNClient;
+import org.apache.hadoop.hdfs.test.system.HDFSCluster;
+import org.apache.hadoop.hdfs.test.system.NNClient;
+import org.junit.After;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+public class TestHL040 {
+ private HDFSCluster cluster = null;
+ private static final Log LOG = LogFactory.getLog(TestHL040.class);
+
+ public TestHL040() throws Exception {
+ }
+
+ @Before
+ public void setUp() throws Exception {
+ cluster = HDFSCluster.createCluster(new Configuration());
+ cluster.setUp();
+ }
+
+ @After
+ public void tearDown() throws Exception {
+ cluster.tearDown();
+ }
+
+ @Test
+ public void testConnect() throws IOException {
+ LOG.info("Staring TestHL040: connecting to the HDFSCluster ");
+ LOG.info("================ Getting namenode info ================");
+ NNClient dfsMaster = cluster.getNNClient();
+ LOG.info("Process info of namenode " + dfsMaster.getHostName() + " is: " +
+ dfsMaster.getProcessInfo());
+ LOG.info("================ Getting datanode info ================");
+ Collection<DNClient> clients = cluster.getDNClients();
+ for (DNClient dnC : clients) {
+ LOG.info("Process info of datanode " + dnC.getHostName() + " is: " +
+ dnC.getProcessInfo());
+ Assert.assertNotNull("Datanode process info isn't suppose to be null",
+ dnC.getProcessInfo());
+ }
+ }
+}