Posted to mapreduce-commits@hadoop.apache.org by co...@apache.org on 2009/12/14 23:26:04 UTC

svn commit: r890505 - in /hadoop/mapreduce/trunk: ./ ivy/ src/contrib/vaidya/ src/test/ src/test/aop/ src/test/aop/build/ src/test/aop/org/ src/test/aop/org/apache/ src/test/aop/org/apache/hadoop/ src/test/aop/org/apache/hadoop/fi/

Author: cos
Date: Mon Dec 14 22:26:03 2009
New Revision: 890505

URL: http://svn.apache.org/viewvc?rev=890505&view=rev
Log:
MAPREDUCE-1084. Implementing aspects development and fault injection framework for MapReduce. Contributed by Sreekanth Ramakrishnan

Added:
    hadoop/mapreduce/trunk/src/test/aop/
    hadoop/mapreduce/trunk/src/test/aop/build/
    hadoop/mapreduce/trunk/src/test/aop/build/aop.xml
    hadoop/mapreduce/trunk/src/test/aop/org/
    hadoop/mapreduce/trunk/src/test/aop/org/apache/
    hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/
    hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/
    hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java
    hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
    hadoop/mapreduce/trunk/src/test/fi-site.xml
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/build.xml
    hadoop/mapreduce/trunk/ivy.xml
    hadoop/mapreduce/trunk/ivy/libraries.properties
    hadoop/mapreduce/trunk/src/contrib/vaidya/build.xml

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=890505&r1=890504&r2=890505&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Mon Dec 14 22:26:03 2009
@@ -68,6 +68,9 @@
 
     MAPREDUCE-1050. Introduce a mock object testing framework. (tomwhite)
 
+    MAPREDUCE-1084. Implementing aspects development and fault injection
+    framework for MapReduce. (Sreekanth Ramakrishnan via cos)
+
   OPTIMIZATIONS
 
     MAPREDUCE-270. Fix the tasktracker to optionally send an out-of-band

Modified: hadoop/mapreduce/trunk/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/build.xml?rev=890505&r1=890504&r2=890505&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/build.xml (original)
+++ hadoop/mapreduce/trunk/build.xml Mon Dec 14 22:26:03 2009
@@ -439,6 +439,7 @@
       <fileset file="${conf.dir}/log4j.properties"/>
       <fileset file="${conf.dir}/hadoop-metrics.properties"/> -->
       <zipfileset dir="${build.webapps}" prefix="webapps"/>
+      <fileset file="${jar.extra.properties.list}" />
     </jar>
   </target>
 
@@ -601,6 +602,12 @@
         <syspropertyset dynamic="no">
           <propertyref name="compile.c++"/>
         </syspropertyset>
+        
+        <!-- Pass probability specifications to the spawned JVM -->
+        <syspropertyset id="FaultProbabilityProperties">
+          <propertyref regex="fi.*"/>
+        </syspropertyset>
+        
         <classpath refid="test.classpath"/>
         <formatter type="${test.junit.output.format}" />
         <batchtest todir="${test.build.dir}" unless="testcase">
@@ -614,6 +621,24 @@
           <fileset dir="${test.src.dir}/mapred" includes="**/${testcase}.java"/>
           <fileset dir="${test.src.dir}/unit" includes="**/${testcase}.java"/>
         </batchtest>
+        <!-- batch test to run all the test cases in the aop folder with
+        fault injection -->
+        <batchtest todir="${test.build.dir}" if="tests.notestcase.fi">
+          <fileset dir="${test.src.dir}/aop"
+            includes="**/${test.include}.java"
+            excludes="**/${test.exclude}.java" />
+        </batchtest>
+        <!-- batch test for running a single test case in the aop folder
+        with fault injection -->
+        <batchtest todir="${test.build.dir}" if="tests.testcase.fi">
+         <fileset dir="${test.src.dir}/aop" includes="**/${testcase}.java"/>
+        </batchtest>
+         <!-- The following batch is for the special case when non-FI
+         tests need to be executed in an FI environment -->
+         <batchtest todir="${test.build.dir}" if="tests.testcaseonly.fi">
+          <fileset dir="${test.src.dir}/mapred" 
+            includes="**/${testcase}.java"/>
+        </batchtest>
       </junit>
       <antcall target="checkfailure"/>
     </sequential>
@@ -1725,6 +1750,86 @@
         failonerror="yes">
     </exec>
   </target>
-
   <!-- end of task-controller targets -->
+  
+  <!-- Beginning of fault-injection targets -->
+  <import file="${test.src.dir}/aop/build/aop.xml"/>
+  
+  <!-- declaring ${src.dir}/java as java.src.dir for aop.xml -->
+  <property name="java.src.dir" value="${src.dir}/java"/>
+  
+  <!-- target dependency from aop.xml -->
+  <target name="-classes-compilation" 
+    depends="compile-mapred-classes, compile-mapred-test"/>
+  
+  <target name="jar-test-fault-inject" depends="jar-mapred-test-fault-inject"
+            description="Make hadoop-mapred-test-fi.jar files"/>
+  
+  <!-- target to build test-fi.jar-->
+  <target name="jar-mapred-test-fault-inject" depends="injectfaults"
+    description="Make hadoop-mapred-test-fi.jar">
+    <macro-jar-test-fault-inject target.name="jar-test" 
+      jar.final.name="test.final.name"
+      jar.final.value="${name}-test-${version}-fi"/>
+  </target>
+  
+  <!-- target to build the hadoop-fi.jar -->
+  <target name="jar-fault-inject" depends="injectfaults"
+    description="Make hadoop-fi.jar">
+    <macro-jar-fault-inject
+      target.name="jar"
+      jar.final.name="final.name"
+      jar.final.value="${final.name}-fi" />
+  </target>
+  
+  <!-- target to run fault-injected test cases; runs the entire mapred
+       test suite -->
+  <target name="run-test-mapred-fault-inject" depends="injectfaults"
+    description="Run full suite of unit tests with fault injection">
+    <macro-run-tests-fault-inject target.name="run-test-mapred"
+      testcasesonly="false"/>
+  </target>
+
+  <!-- target to run non-FI tests in an FI environment -->
+  <target name="run-fault-inject-with-testcaseonly" depends="injectfaults">
+    <fail unless="testcase">
+    Can't run this target without -Dtestcase setting!
+    </fail>
+    <macro-run-tests-fault-inject target.name="run-test-mapred"
+      testcasesonly="true"/>
+  </target>
+  <condition property="tests.notestcase">
+    <and>
+      <isfalse value="${test.fault.inject}"/>
+      <not>
+        <isset property="testcase"/>
+      </not>
+    </and>
+  </condition>
+  <condition property="tests.notestcase.fi">
+    <and>
+      <not>
+        <isset property="testcase"/>
+      </not>
+      <istrue value="${test.fault.inject}"/>
+    </and>
+  </condition>
+  <condition property="test.testcase">
+    <and>
+      <isfalse value="${test.fault.inject}"/>
+      <isset property="testcase"/>
+    </and>
+  </condition>
+  <condition property="tests.testcaseonly.fi">
+    <istrue value="${special.fi.testcasesonly}" />
+  </condition>
+  <condition property="tests.testcase.fi">
+    <and>
+      <istrue value="${test.fault.inject}" />
+      <isset property="testcase" />
+      <isfalse value="${special.fi.testcasesonly}" />
+    </and>
+  </condition>
+  <!-- End of fault injection targets-->
+  
 </project>
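
Taken together, the conditions above mean the fault-injection runs are driven entirely by Ant properties: presumably one first runs 'ant injectfaults' to weave the aspects, then 'ant run-test-mapred-fault-inject' for the full FI suite (which sets test.fault.inject and narrows the includes to TestFi*), or 'ant run-fault-inject-with-testcaseonly -Dtestcase=<TestName>' to execute a single non-FI test against the instrumented classes. Fault probabilities reach the spawned test JVM through the FaultProbabilityProperties syspropertyset, so any -Dfi.<fault location>=<level> passed on the Ant command line is forwarded to the tests.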

Modified: hadoop/mapreduce/trunk/ivy.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy.xml?rev=890505&r1=890504&r2=890505&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy.xml (original)
+++ hadoop/mapreduce/trunk/ivy.xml Mon Dec 14 22:26:03 2009
@@ -96,6 +96,11 @@
                conf="common->default"/>
    <dependency org="org.mockito" name="mockito-all" rev="${mockito-all.version}" 
                conf="test->default"/>
+   <!-- dependency addition for the fault injection -->
+   <dependency org="org.aspectj" name="aspectjrt" rev="${aspectj.version}"
+               conf="common->default"/>
+   <dependency org="org.aspectj" name="aspectjtools" rev="${aspectj.version}"
+               conf="common->default"/>
 
  </dependencies>
   

Modified: hadoop/mapreduce/trunk/ivy/libraries.properties
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/ivy/libraries.properties?rev=890505&r1=890504&r2=890505&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/ivy/libraries.properties (original)
+++ hadoop/mapreduce/trunk/ivy/libraries.properties Mon Dec 14 22:26:03 2009
@@ -17,6 +17,9 @@
 apacheant.version=1.7.1
 ant-task.version=2.0.10
 
+#AspectJ dependency for fault injection
+aspectj.version=1.6.5
+
 avro.version=1.2.0
 
 checkstyle.version=4.2
@@ -81,3 +84,4 @@
 
 xmlenc.version=0.52
 xerces.version=1.4.4
+

Modified: hadoop/mapreduce/trunk/src/contrib/vaidya/build.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/vaidya/build.xml?rev=890505&r1=890504&r2=890505&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/vaidya/build.xml (original)
+++ hadoop/mapreduce/trunk/src/contrib/vaidya/build.xml Mon Dec 14 22:26:03 2009
@@ -20,7 +20,6 @@
 <project name="vaidya" default="jar">
 
 	<import file="../build-contrib.xml" />
-        <import file="../../../build.xml" />
 
 	<target name="init">
 		<mkdir dir="${build.dir}" />

Added: hadoop/mapreduce/trunk/src/test/aop/build/aop.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/aop/build/aop.xml?rev=890505&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/aop/build/aop.xml (added)
+++ hadoop/mapreduce/trunk/src/test/aop/build/aop.xml Mon Dec 14 22:26:03 2009
@@ -0,0 +1,132 @@
+<!--
+   Licensed to the Apache Software Foundation (ASF) under one or more
+   contributor license agreements.  See the NOTICE file distributed with
+   this work for additional information regarding copyright ownership.
+   The ASF licenses this file to You under the Apache License, Version 2.0
+   (the "License"); you may not use this file except in compliance with
+   the License.  You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+-->
+<project name="aspects">
+  <property name="build-fi.dir" value="${basedir}/build-fi"/>
+  <property name="hadoop-fi.jar" location="${build.dir}/${final.name}-fi.jar" />
+  <property name="compile-inject.output" value="${build-fi.dir}/compile-fi.log"/>
+  <property name="aspectversion" value="1.6.5"/>
+  <property file="${basedir}/build.properties"/>
+
+  <!-- All Fault Injection (FI) related targets are located in this section -->
+    
+  <target name="clean-fi">
+    <delete dir="${build-fi.dir}"/>
+  </target>
+  
+  <!-- Weaving aspects in place.
+     Later one can run 'ant jar-fault-inject' to create a
+     Hadoop jar file with instrumented classes.
+  -->
+  <!-- The target -classes-compilation has to be defined in build.xml and
+  needs to depend on the classes compilation and test classes compilation
+  targets. This is a poor man's parameterization of targets. -->
+  <target name="compile-fault-inject" depends="-classes-compilation" >
+    <!-- AspectJ task definition -->
+    <taskdef
+      resource="org/aspectj/tools/ant/taskdefs/aspectjTaskdefs.properties">
+      <classpath>
+        <pathelement 
+          location="${common.ivy.lib.dir}/aspectjtools-${aspectversion}.jar"/>
+      </classpath>
+    </taskdef>
+    <echo message="Start weaving aspects in place"/>
+    <iajc
+      encoding="${build.encoding}" 
+      srcdir="${java.src.dir};${build.src};${test.src.dir}/aop" 
+      includes="org/apache/hadoop/**/*.java, org/apache/hadoop/**/*.aj"
+      excludes="org/apache/hadoop/record/**/*"
+      destDir="${build.classes}"
+      debug="${javac.debug}"
+      target="${javac.version}"
+      source="${javac.version}"
+      deprecation="${javac.deprecation}">
+      <classpath refid="test.classpath"/>
+    </iajc>
+    <loadfile property="injection.failure" srcfile="${compile-inject.output}">
+      <filterchain>
+        <linecontainsregexp>
+          <regexp pattern='iajc.*warning'/>
+        </linecontainsregexp>
+      </filterchain>
+    </loadfile>
+    <fail if="injection.failure">
+      Broken binding of advices: ${line.separator}${injection.failure}
+    </fail>
+    <echo message="Weaving of aspects is finished"/>
+  </target>
+
+  <target name="injectfaults" 
+  	description="Instrument classes with faults and other AOP advices">
+    <!--mkdir to prevent <subant> failure in case the folder has been removed-->
+    <mkdir dir="${build-fi.dir}"/>
+    <delete file="${compile-inject.output}"/>
+    <subant buildpath="${basedir}" target="compile-fault-inject"
+    	output="${compile-inject.output}">
+      <property name="build.dir" value="${build-fi.dir}"/>
+    </subant>
+  </target>
+
+  <macrodef name="macro-run-tests-fault-inject">
+    <attribute name="target.name" />
+    <attribute name="testcasesonly" />
+    <sequential>
+      <subant buildpath="build.xml" target="@{target.name}">
+        <property name="build.dir" value="${build-fi.dir}"/>
+        <property name="test.fault.inject" value="yes"/>
+        <property name="test.include" value="TestFi*"/>
+        <!-- This one is needed for the special "regression" target only -->
+        <property name="special.fi.testcasesonly" value="@{testcasesonly}"/>
+      </subant>
+    </sequential>
+  </macrodef>
+	
+  <!-- ================================================================== -->
+  <!-- Make hadoop-fi.jar including all Fault injected artifacts         -->
+  <!-- ================================================================== -->
+  <macrodef name="macro-jar-fault-inject">
+    <attribute name="target.name" />
+    <attribute name="jar.final.name" />
+    <attribute name="jar.final.value" />
+    <sequential>
+      <subant buildpath="build.xml" target="@{target.name}">
+        <property name="build.dir" value="${build-fi.dir}"/>
+        <property name="@{jar.final.name}" value="@{jar.final.value}"/>
+        <property name="jar.extra.properties.list" 
+        	  value="${test.src.dir}/fi-site.xml" />
+      </subant>
+    </sequential>
+  </macrodef>
+
+  <!-- ================================================================== -->
+  <!-- Make test jar files including all Fault Injected artifacts         -->
+  <!-- ================================================================== -->
+
+  <macrodef name="macro-jar-test-fault-inject">
+    <attribute name="target.name" />
+    <attribute name="jar.final.name" />
+    <attribute name="jar.final.value" />
+    <sequential>
+      <subant buildpath="build.xml" target="@{target.name}">
+        <property name="build.dir" value="${build-fi.dir}"/>
+        <property name="@{jar.final.name}" 
+                  value="@{jar.final.value}"/>
+      </subant>
+    </sequential>
+  </macrodef>
+
+  <!-- End of Fault Injection (FI) related section -->
+</project>
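
For context, the artifacts this build weaves are AspectJ aspects kept under src/test/aop alongside the *.java sources. The commit itself ships no aspects yet, so the following is only a minimal, hypothetical sketch (the class name, the pointcut, and the advised TaskTracker method are illustrative assumptions, not part of the change); it shows how a woven advice could consult the ProbabilityModel class added later in this commit to decide whether to inject a fault:

// Hypothetical example only -- not added by this commit.
package org.apache.hadoop.fi;

import java.io.IOException;

import org.aspectj.lang.annotation.Aspect;
import org.aspectj.lang.annotation.Before;

@Aspect
public class ExampleTaskTrackerAspect {

  // Before the (illustrative) join point runs, fail with the probability
  // configured for the "TaskTracker" fault location (-Dfi.TaskTracker=...).
  @Before("execution(* org.apache.hadoop.mapred.TaskTracker.offerService(..))")
  public void maybeThrow() throws IOException {
    if (ProbabilityModel.injectCriteria("TaskTracker")) {
      throw new IOException("Injected fault before TaskTracker.offerService");
    }
  }
}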

Added: hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java?rev=890505&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java (added)
+++ hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/FiConfig.java Mon Dec 14 22:26:03 2009
@@ -0,0 +1,55 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.HdfsConfiguration;
+
+/**
+ * This class wraps the logic around the fault injection configuration file.
+ * The default file is expected to be found in src/test/fi-site.xml.
+ * This default file should be copied by the JUnit Ant tasks to the
+ * build/test/extraconf folder before the tests are run.
+ * An alternative location can be set through
+ *   -Dfi.config=<file_name>
+ */
+public class FiConfig {
+  private static final String CONFIG_PARAMETER = ProbabilityModel.FPROB_NAME + "config";
+  private static final String DEFAULT_CONFIG = "fi-site.xml";
+  private static Configuration conf;
+  static {
+    init();
+  }
+  
+  protected static void init () {
+    if (conf == null) {
+      conf = new HdfsConfiguration(false);
+      String configName = System.getProperty(CONFIG_PARAMETER, DEFAULT_CONFIG);
+      conf.addResource(configName);
+    }
+  }
+  
+  /**
+   * Provides access to the local Configuration.
+   * 
+   * @return Configuration initialized with the fault injection parameters
+   */
+  public static Configuration getConfig() {
+    return conf;
+  }
+}
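
A brief, hypothetical illustration of how FiConfig would typically be consumed (the class name FiConfigDemo and the fi.myClass property are invented for the example; only FiConfig.getConfig() and the fi.* default come from this commit):

// Illustrative sketch only -- not part of the commit.
package org.apache.hadoop.fi;

import org.apache.hadoop.conf.Configuration;

public class FiConfigDemo {
  public static void main(String[] args) {
    // FiConfig loads fi-site.xml (or the file named by -Dfi.config=<file>)
    // once, in its static initializer, and hands out the Configuration.
    Configuration conf = FiConfig.getConfig();

    // fi.* holds the default probability for every fault location;
    // an unset per-location property falls back to it.
    float defaultLevel = conf.getFloat("fi.*", 0.00f);
    float myClassLevel = conf.getFloat("fi.myClass", defaultLevel);

    System.out.println("fi.* = " + defaultLevel
        + ", fi.myClass = " + myClassLevel);
  }
}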

Added: hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java?rev=890505&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java (added)
+++ hadoop/mapreduce/trunk/src/test/aop/org/apache/hadoop/fi/ProbabilityModel.java Mon Dec 14 22:26:03 2009
@@ -0,0 +1,105 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.fi;
+
+import java.util.Random;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+
+/**
+ * This class is responsible for deciding when a fault
+ * has to be triggered within a Hadoop class.
+ * 
+ *  The default probability of injection is 0%. To change it
+ *  one can set the sys. prop. -Dfi.*=<new probability level>
+ *  Another way to do so is to set this level through the FI config file,
+ *  located under src/test/fi-site.xml
+ *  
+ *  To change the level for a single fault location one specifies the
+ *  sys. prop. -Dfi.<name of fault location>=<probability level> at runtime.
+ *  The probability level is specified as a float between 0.0 and 1.0
+ *  
+ *  <name of fault location> might be represented by a short classname
+ *  or otherwise. This decision is left up to the discretion of the aspects
+ *  developer, but has to be consistent throughout the code
+ */
+public class ProbabilityModel {
+  private static Random generator = new Random();
+  private static final Log LOG = LogFactory.getLog(ProbabilityModel.class);
+
+  static final String FPROB_NAME = "fi.";
+  private static final String ALL_PROBABILITIES = FPROB_NAME + "*";
+  private static final float DEFAULT_PROB = 0.00f; //Default probability is 0%
+  private static final float MAX_PROB = 1.00f; // Max probability is 100%
+
+  private static Configuration conf = FiConfig.getConfig();
+
+  static {
+    // Set a new default probability if specified through a system property.
+    // If neither is specified, the default probability is DEFAULT_PROB.
+    conf.set(ALL_PROBABILITIES, 
+        System.getProperty(ALL_PROBABILITIES, 
+            conf.get(ALL_PROBABILITIES, Float.toString(DEFAULT_PROB))));
+
+    LOG.info(ALL_PROBABILITIES + "=" + conf.get(ALL_PROBABILITIES));
+  }
+
+  /**
+   * Simplistic method to check if we have reached the point of injection
+   * @param klassName is the name of the probability level to check. 
+   *  If a configuration has been set for "fi.myClass" then you can check if the
+   *  injection criteria have been reached by calling this method with the
+   *  "myClass" string as its parameter
+   * @return true if the probability threshold has been reached; false otherwise
+   */
+  public static boolean injectCriteria(String klassName) {
+    boolean trigger = false;
+    // TODO fix this: make it more sophisticated!!!
+    if (generator.nextFloat() < getProbability(klassName)) {
+      trigger = true;
+    }
+    return trigger;
+  }
+
+  /**
+   * This primitive checks for an arbitrarily set desired probability. If the
+   * level hasn't been set, the method returns the default setting.
+   * The probability is expected to be set as a float between 0.0 and 1.0
+   * @param klass is the name of the resource
+   * @return float representation of the configured probability level of
+   *  the requested resource, or the default value if it hasn't been set
+   */
+  protected static float getProbability(final String klass) {
+    String newProbName = FPROB_NAME + klass;
+
+    String newValue = System.getProperty(newProbName, conf.get(ALL_PROBABILITIES));
+    if (newValue != null && !newValue.equals(conf.get(newProbName)))
+      conf.set(newProbName, newValue);
+
+    float ret = conf.getFloat(newProbName,
+        conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB));
+    LOG.debug("Request for " + newProbName + " returns=" + ret);
+    // Make sure that probability level is valid.
+    if (ret < DEFAULT_PROB || ret > MAX_PROB) 
+      ret = conf.getFloat(ALL_PROBABILITIES, DEFAULT_PROB);
+    
+    return ret;
+  }
+}
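
As a rough sanity check of the probability semantics described above, the following hypothetical snippet (the class name ProbabilityModelDemo and the fault location "myClass" are invented for the example, and it assumes the FI test classpath, including the HDFS configuration classes pulled in by FiConfig, is available) raises one location's level at runtime and counts how often injection would trigger:

// Minimal sketch only -- not part of the commit. Placed in the same
// package purely to keep the example close to the classes above.
package org.apache.hadoop.fi;

public class ProbabilityModelDemo {
  public static void main(String[] args) {
    // Equivalent to passing -Dfi.myClass=0.5 on the command line;
    // getProbability() re-reads the system property on every call.
    System.setProperty("fi.myClass", "0.5");

    int triggered = 0;
    for (int i = 0; i < 1000; i++) {
      if (ProbabilityModel.injectCriteria("myClass")) {
        triggered++;
      }
    }
    // Roughly half the checks should ask for an injection, while a
    // location left at the fi.*=0.00 default would never trigger.
    System.out.println("triggered " + triggered + " of 1000 checks");
  }
}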

Added: hadoop/mapreduce/trunk/src/test/fi-site.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/fi-site.xml?rev=890505&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/fi-site.xml (added)
+++ hadoop/mapreduce/trunk/src/test/fi-site.xml Mon Dec 14 22:26:03 2009
@@ -0,0 +1,15 @@
+<?xml version="1.0"?>
+<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
+
+<!-- Put fault injection specific property overrides in this file. -->
+
+<configuration>
+  <property>
+    <name>fi.*</name>
+    <value>0.00</value>
+    <description>
+    	Default probability level for all injected faults specified 
+    	as a floating number between 0 and 1.00
+    </description>
+  </property>
+</configuration>
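
A per-location override would presumably sit alongside the wildcard as an additional property, e.g. one named fi.<fault location> with its own value between 0.0 and 1.0; ProbabilityModel consults that specific name first and only falls back to the fi.* default when it is absent.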