You are viewing a plain text version of this content. The canonical link for it is here.
Posted to common-commits@hadoop.apache.org by co...@apache.org on 2009/10/13 23:54:49 UTC
svn commit: r824942 - in /hadoop/common/trunk: ./ .eclipse.templates/ ivy/
src/ src/test/ src/test/aop/ src/test/aop/build/ src/test/aop/org/
src/test/aop/org/apache/ src/test/aop/org/apache/hadoop/
src/test/aop/org/apache/hadoop/fi/
Author: cos
Date: Tue Oct 13 21:54:48 2009
New Revision: 824942
URL: http://svn.apache.org/viewvc?rev=824942&view=rev
Log:
HADOOP-6204. Implementing aspects development and fault injection framework for Hadoop. Contributed by Konstantin Boudnik
Added:
hadoop/common/trunk/src/test/aop/
hadoop/common/trunk/src/test/aop/build/
hadoop/common/trunk/src/test/aop/build/aop.xml
hadoop/common/trunk/src/test/aop/org/
hadoop/common/trunk/src/test/aop/org/apache/
hadoop/common/trunk/src/test/aop/org/apache/hadoop/
hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/
hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java
hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
hadoop/common/trunk/src/test/fi-site.xml
Modified:
hadoop/common/trunk/.eclipse.templates/.classpath
hadoop/common/trunk/CHANGES.txt
hadoop/common/trunk/build.xml
hadoop/common/trunk/ivy.xml
hadoop/common/trunk/ivy/libraries.properties
hadoop/common/trunk/src/saveVersion.sh
Modified: hadoop/common/trunk/.eclipse.templates/.classpath
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/.eclipse.templates/.classpath?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/.eclipse.templates/.classpath (original)
+++ hadoop/common/trunk/.eclipse.templates/.classpath Tue Oct 13 21:54:48 2009
@@ -1,6 +1,7 @@
<?xml version="1.0" encoding="UTF-8"?>
<classpath>
<classpathentry kind="src" path="src/java"/>
+ <classpathentry kind="src" path="src/test/aop"/>
<classpathentry kind="src" path="src/test/core"/>
<classpathentry kind="con" path="org.eclipse.jdt.launching.JRE_CONTAINER"/>
<classpathentry kind="var" path="ANT_HOME/lib/ant.jar"/>
Modified: hadoop/common/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/CHANGES.txt?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/CHANGES.txt (original)
+++ hadoop/common/trunk/CHANGES.txt Tue Oct 13 21:54:48 2009
@@ -24,6 +24,9 @@
HADOOP-6305. Unify build property names to facilitate cross-projects
modifications (cos)
+ HADOOP-6204. Implementing aspects development and fault injection
+ framework for Hadoop (cos)
+
OPTIMIZATIONS
BUG FIXES
Modified: hadoop/common/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/build.xml?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/build.xml (original)
+++ hadoop/common/trunk/build.xml Tue Oct 13 21:54:48 2009
@@ -252,7 +252,7 @@
</copy>
<exec executable="sh">
- <arg line="src/saveVersion.sh ${version}"/>
+ <arg line="src/saveVersion.sh ${version} ${build.dir}"/>
</exec>
<exec executable="sh">
@@ -260,6 +260,8 @@
</exec>
</target>
+ <import file="${test.src.dir}/aop/build/aop.xml"/>
+
<!-- ====================================================== -->
<!-- Compile the Java files -->
<!-- ====================================================== -->
@@ -381,7 +383,8 @@
<tar compression="gzip" destfile="${build.classes}/bin.tgz">
<tarfileset dir="bin" mode="755"/>
</tar>
- <jar jarfile="${hadoop-core.jar}"
+ <property name="jar.properties.list" value="commons-logging.properties, log4j.properties, hadoop-metrics.properties" />
+ <jar jarfile="${build.dir}/${final.name}.jar"
basedir="${build.classes}">
<manifest>
<section name="org/apache/hadoop">
@@ -390,13 +393,11 @@
<attribute name="Implementation-Vendor" value="Apache"/>
</section>
</manifest>
- <fileset file="${conf.dir}/commons-logging.properties"/>
- <fileset file="${conf.dir}/log4j.properties"/>
- <fileset file="${conf.dir}/hadoop-metrics.properties"/>
+ <fileset dir="${conf.dir}" includes="${jar.properties.list}" />
+ <fileset file="${jar.extra.properties.list}" />
</jar>
</target>
-
<!-- ================================================================== -->
<!-- Make the Hadoop metrics jar. (for use outside Hadoop) -->
<!-- ================================================================== -->
@@ -431,6 +432,9 @@
<!-- ================================================================== -->
<!-- Compile test code -->
<!-- ================================================================== -->
+ <!-- This is a wrapper for fault-injection needs-->
+ <target name="compile-tests" depends="compile-core-test"/>
+
<target name="compile-core-test" depends="compile-core-classes, ivy-retrieve-test, generate-test-records, generate-avro-records">
<mkdir dir="${test.core.build.classes}"/>
<javac
@@ -498,6 +502,75 @@
</target>
<!-- ================================================================== -->
+ <!-- Fault injection customization section.
+ These targets ought to be copied over to other projects and modified
+ as needed -->
+ <!-- ================================================================== -->
+ <target name="run-test-core-fault-inject" depends="injectfaults"
+ description="Run full set of the unit tests with fault injection">
+ <macro-run-tests-fault-inject target.name="run-test-core"
+ testcasesonly="false"/>
+ </target>
+
+ <target name="jar-test-fault-inject" depends="injectfaults"
+ description="Make hadoop-test-fi.jar">
+ <macro-jar-test-fault-inject
+ target.name="jar-test"
+ jar.final.name="test.final.name"
+ jar.final.value="${test.final.name}-fi" />
+ </target>
+
+ <target name="jar-fault-inject" depends="injectfaults"
+ description="Make hadoop-fi.jar">
+ <macro-jar-fault-inject
+ target.name="jar"
+ jar.final.name="final.name"
+ jar.final.value="${final.name}-fi" />
+ </target>
+
+ <!--This target is not included into the top level list of targets
+ for it serves a special "regression" testing purpose of non-FI tests in
+ FI environment -->
+ <target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
+ <fail unless="testcase">Can't run this target without -Dtestcase setting!
+ </fail>
+ <macro-run-tests-fault-inject target.name="run-test-core"
+ testcasesonly="true"/>
+ </target>
+ <!-- ================================================================== -->
+ <!-- End of Fault injection customization section -->
+ <!-- ================================================================== -->
+
+ <condition property="tests.notestcase">
+ <and>
+ <isfalse value="${test.fault.inject}"/>
+ <not>
+ <isset property="testcase"/>
+ </not>
+ </and>
+ </condition>
+ <condition property="tests.notestcase.fi">
+ <and>
+ <not>
+ <isset property="testcase" />
+ </not>
+ <istrue value="${test.fault.inject}" />
+ </and>
+ </condition>
+ <condition property="tests.testcase">
+ <and>
+ <isfalse value="${test.fault.inject}" />
+ <isset property="testcase" />
+ </and>
+ </condition>
+ <condition property="tests.testcase.fi">
+ <and>
+ <istrue value="${test.fault.inject}" />
+ <isset property="testcase" />
+ </and>
+ </condition>
+
+ <!-- ================================================================== -->
<!-- Run unit tests -->
<!-- ================================================================== -->
<target name="run-test-core" depends="compile-core-test" description="Run core unit tests">
@@ -508,6 +581,8 @@
<mkdir dir="${test.log.dir}"/>
<copy file="${test.src.dir}/hadoop-policy.xml"
todir="${test.build.extraconf}" />
+ <copy file="${test.src.dir}/fi-site.xml"
+ todir="${test.build.extraconf}" />
<junit showoutput="${test.output}"
printsummary="${test.junit.printsummary}"
haltonfailure="${test.junit.haltonfailure}"
@@ -536,13 +611,30 @@
<propertyref name="compile.c++"/>
</syspropertyset>
<classpath refid="test.classpath"/>
+ <syspropertyset id="FaultProbabilityProperties">
+ <propertyref regex="fi.*"/>
+ </syspropertyset>
<formatter type="${test.junit.output.format}" />
- <batchtest todir="${test.build.dir}" unless="testcase">
+ <batchtest todir="${test.build.dir}" if="tests.notestcase">
<fileset dir="${test.src.dir}/core"
includes="**/${test.include}.java"
excludes="**/${test.exclude}.java" />
</batchtest>
- <batchtest todir="${test.build.dir}" if="testcase">
+ <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
+ <fileset dir="${test.src.dir}/aop"
+ includes="**/${test.include}.java"
+ excludes="**/${test.exclude}.java" />
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="tests.testcase">
+ <fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
+ </batchtest>
+ <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
+ <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+ </batchtest>
+ <!--The following batch is for very special occasions only when
+ a non-FI tests are needed to be executed against FI-environment -->
+ <batchtest todir="${test.build.dir}" if="special.fi.testcasesonly">
+ <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
<fileset dir="${test.src.dir}/core" includes="**/${testcase}.java"/>
</batchtest>
</junit>
@@ -566,6 +658,7 @@
<delete file="${test.build.dir}/testsfailed"/>
<property name="continueOnFailure" value="true"/>
<antcall target="run-test-core"/>
+ <antcall target="run-test-core-fault-inject"/>
<available file="${test.build.dir}/testsfailed" property="testsfailed"/>
<fail if="testsfailed">Tests failed!</fail>
</target>
@@ -1045,7 +1138,7 @@
<!-- ================================================================== -->
<!-- Clean. Delete the build files, and their directories -->
<!-- ================================================================== -->
- <target name="clean" depends="clean-contrib" description="Clean. Delete the build files, and their directories">
+ <target name="clean" depends="clean-contrib, clean-fi" description="Clean. Delete the build files, and their directories">
<delete dir="${build.dir}"/>
<delete dir="${docs.src}/build"/>
<delete dir="${src.docs.cn}/build"/>
Modified: hadoop/common/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy.xml?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/ivy.xml (original)
+++ hadoop/common/trunk/ivy.xml Tue Oct 13 21:54:48 2009
@@ -285,6 +285,16 @@
name="paranamer-ant"
rev="${paranamer.version}"
conf="common->default"/>
+ <dependency org="org.aspectj"
+ name="aspectjrt"
+ rev="${aspectj.version}"
+ conf="common->default">
+ </dependency>
+ <dependency org="org.aspectj"
+ name="aspectjtools"
+ rev="${aspectj.version}"
+ conf="common->default">
+ </dependency>
</dependencies>
-
+
</ivy-module>
Modified: hadoop/common/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/ivy/libraries.properties?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/ivy/libraries.properties (original)
+++ hadoop/common/trunk/ivy/libraries.properties Tue Oct 13 21:54:48 2009
@@ -77,3 +77,5 @@
xmlenc.version=0.52
xerces.version=1.4.4
+
+aspectj.version=1.6.4
Modified: hadoop/common/trunk/src/saveVersion.sh
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/saveVersion.sh?rev=824942&r1=824941&r2=824942&view=diff
==============================================================================
--- hadoop/common/trunk/src/saveVersion.sh (original)
+++ hadoop/common/trunk/src/saveVersion.sh Tue Oct 13 21:54:48 2009
@@ -21,6 +21,7 @@
unset LANG
unset LC_CTYPE
version=$1
+build_dir=$2
user=`whoami`
date=`date`
cwd=`pwd`
@@ -43,12 +44,12 @@
fi
srcChecksum=`find src -name '*.java' | LC_ALL=C sort | xargs md5sum | md5sum | cut -d ' ' -f 1`
-mkdir -p build/src/org/apache/hadoop
+mkdir -p $build_dir/src/org/apache/hadoop
cat << EOF | \
sed -e "s/VERSION/$version/" -e "s/USER/$user/" -e "s/DATE/$date/" \
-e "s|URL|$url|" -e "s/REV/$revision/" \
-e "s|BRANCH|$branch|" -e "s/SRCCHECKSUM/$srcChecksum/" \
- > build/src/org/apache/hadoop/package-info.java
+ > $build_dir/src/org/apache/hadoop/package-info.java
/*
* Generated by src/saveVersion.sh
*/
Added: hadoop/common/trunk/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/aop/build/aop.xml?rev=824942&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/aop/build/aop.xml (added)
+++ hadoop/common/trunk/src/test/aop/build/aop.xml Tue Oct 13 21:54:48 2009
@@ -0,0 +1,115 @@
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<project name="aspects">
+ <property name="build-fi.dir" value="${basedir}/build-fi"/>
+ <property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
+ <property name="aspectversion" value="1.6.4"/>
+ <property file="${basedir}/build.properties"/>
+
+ <!--All Fault Injection (FI) related targets are located in this section -->
+
+ <target name="clean-fi">
+ <delete dir="${build-fi.dir}"/>
+ </target>
+
+ <!-- Weaving aspects in place
+ Later on one can run 'ant jar-fault-inject' to create
+ Hadoop jar file with instrumented classes
+ -->
+ <target name="compile-fault-inject"
+ depends="compile-core-classes, compile-tests">
+ <!-- AspectJ task definition -->
+ <taskdef
+ resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
+ <classpath>
+ <pathelement
+ location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
+ </classpath>
+ </taskdef>
+ <echo message="Start weaving aspects in place"/>
+ <iajc
+ encoding="${build.encoding}"
+ srcdir="${java.src.dir};${build.src};${test.src.dir}/aop"
+ includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
+ excludes="org/apache/hadoop/record/**/*"
+ destDir="${build.classes}"
+ debug="${javac.debug}"
+ target="${javac.version}"
+ source="${javac.version}"
+ deprecation="${javac.deprecation}">
+ <classpath refid="test.classpath"/>
+ </iajc>
+ <echo message="Weaving of aspects is finished"/>
+ </target>
+
+ <target name="injectfaults"
+ description="Instrument classes with faults and other AOP advices">
+ <subant buildpath="${basedir}" target="compile-fault-inject">
+ <property name="build.dir" value="${build-fi.dir}"/>
+ </subant>
+ </target>
+
+ <macrodef name="macro-run-tests-fault-inject">
+ <attribute name="target.name" />
+ <attribute name="testcasesonly" />
+ <sequential>
+ <subant buildpath="build.xml" target="@{target.name}">
+ <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="test.fault.inject" value="yes"/>
+ <property name="test.include" value="TestFi*"/>
+ <!-- This one is needed for the special "regression" target only -->
+ <property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <!-- ================================================================== -->
+ <!-- Make hadoop-fi.jar including all Fault injected artifacts -->
+ <!-- ================================================================== -->
+ <macrodef name="macro-jar-fault-inject">
+ <attribute name="target.name" />
+ <attribute name="jar.final.name" />
+ <attribute name="jar.final.value" />
+ <sequential>
+ <subant buildpath="build.xml" target="@{target.name}">
+ <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="@{jar.final.name}" value="@{jar.final.value}"/>
+ <property name="jar.extra.properties.list"
+ value="${test.src.dir}/fi-site.xml" />
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <!-- ================================================================== -->
+ <!-- Make test jar files including all Fault Injected artifacts -->
+ <!-- ================================================================== -->
+
+ <macrodef name="macro-jar-test-fault-inject">
+ <attribute name="target.name" />
+ <attribute name="jar.final.name" />
+ <attribute name="jar.final.value" />
+ <sequential>
+ <subant buildpath="build.xml" target="@{target.name}">
+ <property name="build.dir" value="${build-fi.dir}"/>
+ <property name="@{jar.final.name}"
+ value="@{jar.final.value}"/>
+ </subant>
+ </sequential>
+ </macrodef>
+
+ <!--End of Fault Injection (FI) related section-->
+</project>
Added: hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java?rev=824942&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java (added)
+++ hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java Tue Oct 13 21:54:48 2009
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This class wraps the logic around fault injection configuration file
+ * Default file is expected to be found in src/test/fi-site.xml
+ * This default file should be copied by JUnit Ant's tasks to
+ * build/test/extraconf folder before tests are run
+ * An alternative location can be set through
+ * -Dfi.config=<file_name>
+ */
+public class FiConfig {
+ private static final String CONFIG_PARAMETER = ProbabilityModel.FPROB_NAME + "config";
+ private static final String DEFAULT_CONFIG = "fi-site.xml";
+ private static Configuration conf;
+ static {
+ if (conf == null) {
+ conf = new Configuration(false);
+ String configName = System.getProperty(CONFIG_PARAMETER, DEFAULT_CONFIG);
+ conf.addResource(configName);
+ }
+ }
+
+ /**
+ * Method provides access to local Configuration
+ *
+ * @return Configuration initialized with fault injection's parameters
+ */
+ public static Configuration getConfig() {
+ return conf;
+ }
+}
Added: hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java?rev=824942&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java (added)
+++ hadoop/common/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java Tue Oct 13 21:54:48 2009
@@ -0,0 +1,106 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import java.util.Random;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This class is responsible for the decision of when a fault
+ * has to be triggered within a class of Hadoop
+ *
+ * Default probability of injection is set to 0%. To change it
+ * one can set the sys. prop. -Dfi.*=<new probability level>
+ * Another way to do so is to set this level through FI config file,
+ * located under src/test/fi-site.conf
+ *
+ * To change the level one has to specify the following sys. prop.:
+ * -Dfi.<name of fault location>=<probability level> in the runtime
+ * Probability level is specified by a float between 0.0 and 1.0
+ *
+ * <name of fault location> might be represented by a short classname
+ * or otherwise. This decision is left up to the discretion of aspects
+ * developer, but has to be consistent through the code
+ */
+public class ProbabilityModel {
+ private static Random generator = new Random();
+ private static final Log LOG = LogFactory.getLog(ProbabilityModel.class);
+
+ static final String FPROB_NAME = "fi.";
+ private static final String ALL_PROBABILITIES = FPROB_NAME + "*";
+ private static final float DEFAULT_PROB = 0.00f; //Default probability is 0%
+ private static final float MAX_PROB = 1.00f; // Max probability is 100%
+
+ private static Configuration conf = FiConfig.getConfig();
+
+ static {
+ // Set new default probability if specified through a system.property
+ // If neither is specified set default probability to DEFAULT_PROB
+ conf.set(ALL_PROBABILITIES,
+ System.getProperty(ALL_PROBABILITIES,
+ conf.get(ALL_PROBABILITIES, Float.toString(DEFAULT_PROB))));
+
+ LOG.info(ALL_PROBABILITIES + "=" + conf.get(ALL_PROBABILITIES));
+ }
+
+ /**
+ * Simplistic method to check if we have reached the point of injection
+ * @param klassName is the name of the probability level to check.
+ * If a configuration has been set for "fi.myClass" then you can check if the
+ * inject criteria has been reached by calling this method with "myClass"
+ * string as its parameter
+ * @return true if the probability threshold has been reached; false otherwise
+ */
+ public static boolean injectCriteria(String klassName) {
+ boolean trigger = false;
+ if (generator.nextFloat() < getProbability(klassName)) {
+ trigger = true;
+ }
+ return trigger;
+ }
+
+ /**
+ * This primitive checks for arbitrary set of desired probability. If the
+ * level hasn't been set method will return default setting.
+ * The probability is expected to be set as a float between 0.0 and 1.0
+ * @param klass is the name of the resource
+ * @return float representation of configured probability level of
+ * the requested resource or default value if hasn't been set
+ */
+ protected static float getProbability(final String klass) {
+ String newProbName = FPROB_NAME + klass;
+
+ String newValue = System.getProperty(newProbName, conf.get(ALL_PROBABILITIES));
+ if (newValue != null && !newValue.equals(conf.get(newProbName)))
+ conf.set(newProbName, newValue);
+
+ float ret = conf.getFloat(newProbName,
+ conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB));
+ LOG.debug("Request for " + newProbName + " returns=" + ret);
+ // Make sure that probability level is valid.
+ if (ret < DEFAULT_PROB || ret > MAX_PROB) {
+ LOG.info("Probability level is incorrect. Default value is set");
+ ret = conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB);
+ }
+
+ return ret;
+ }
+}
Added: hadoop/common/trunk/src/test/fi-site.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/src/test/fi-site.xml?rev=824942&view=auto
==============================================================================
--- hadoop/common/trunk/src/test/fi-site.xml (added)
+++ hadoop/common/trunk/src/test/fi-site.xml Tue Oct 13 21:54:48 2009
@@ -0,0 +1,31 @@
+<?xml version="1.0"?>
+<!--
+ Licensed to the Apache Software Foundation (ASF) under one or more
+ contributor license agreements. See the NOTICE file distributed with
+ this work for additional information regarding copyright ownership.
+ The ASF licenses this file to You under the Apache License, Version 2.0
+ (the "License"); you may not use this file except in compliance with
+ the License. You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
+-->
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put fault injection specific property overrides in this file. -->
+
+<configuration>
+ <property>
+ <name>fi.*</name>
+ <value>0.00</value>
+ <description>
+ Default probability level for all injected faults specified
+ as a floating number between 0 and 1.00
+ </description>
+ </property>
+</configuration>