Posted to mapreduce-commits@hadoop.apache.org by dd...@apache.org on 2009/08/04 09:53:24 UTC

svn commit: r800693 - in /hadoop/mapreduce/trunk: CHANGES.txt src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java

Author: ddas
Date: Tue Aug  4 07:53:24 2009
New Revision: 800693

URL: http://svn.apache.org/viewvc?rev=800693&view=rev
Log:
MAPREDUCE-408. Fixes an assertion problem in TestKillSubProcesses. Contributed by Ravi Gummadi.
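The hunks below show the heart of the fix: previously the test driver signalled TEST_ROOT_DIR + "/failjob/signalFile" (or "/succeedjob/signalFile") while the mappers polled TEST_ROOT_DIR + "/script/signalFile", with each side recomputing the paths on its own. The patch replaces that with static path constants and forwards test.build.data to the task JVM so the driver and the mappers resolve identical locations. A minimal sketch of the shared-constants idea, with hypothetical class and field names:

    import java.io.File;
    import org.apache.hadoop.fs.Path;

    // Hypothetical sketch: as long as the test JVM and the task JVM see the
    // same -Dtest.build.data value, both resolve the signal file to the same
    // location at class-load time.
    class SharedTestPaths {
      static final String BASE =
          new File(System.getProperty("test.build.data", "/tmp")).getAbsolutePath();
      static final Path SIGNAL_FILE =
          new Path(BASE + "/killSubProcesses/script/signalFile");
    }
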

Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=800693&r1=800692&r2=800693&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Tue Aug  4 07:53:24 2009
@@ -313,3 +313,6 @@
 
     MAPREDUCE-587. Fixes a OOM issue in TestStreamingExitStatus.
     (Amar Kamat via ddas) 
+
+    MAPREDUCE-408. Fixes an assertion problem in TestKillSubProcesses
+    (Ravi Gummadi via ddas)

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java?rev=800693&r1=800692&r2=800693&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java Tue Aug  4 07:53:24 2009
@@ -53,14 +53,20 @@
   private static volatile Log LOG = LogFactory
             .getLog(TestKillSubProcesses.class);
 
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "/tmp"), "killjob").toURI().toString().replace(' ', '+');
+  private static String BASE_TEST_ROOT_DIR = new File(System.getProperty(
+      "test.build.data", "/tmp")).getAbsolutePath();
+  private static String TEST_ROOT_DIR = BASE_TEST_ROOT_DIR + Path.SEPARATOR
+      + "killSubProcesses"; 
+
+  private static Path scriptDir = new Path(TEST_ROOT_DIR, "script");
+  private static String scriptDirName = scriptDir.toUri().getPath();
+  private static Path signalFile = new Path(TEST_ROOT_DIR
+      + "/script/signalFile");
 
   private static JobClient jobClient = null;
 
   static MiniMRCluster mr = null;
-  private static Path scriptDir = null;
-  private static String scriptDirName = null;
+
   private static String pid = null;
 
   // number of levels in the subtree of subprocesses of map task
@@ -75,7 +81,6 @@
     conf.setJobName("testkilljobsubprocesses");
     conf.setMapperClass(KillingMapperWithChildren.class);
     
-    scriptDir = new Path(TEST_ROOT_DIR , "script");
     RunningJob job = runJobAndSetProcessHandle(jt, conf);
 
     // kill the job now
@@ -108,9 +113,8 @@
     // check if all the subprocesses are killed properly.
     conf.setMaxMapAttempts(1);
     
-    scriptDir = new Path(TEST_ROOT_DIR + "/script");
     RunningJob job = runJobAndSetProcessHandle(jt, conf);
-    signalTask(TEST_ROOT_DIR + "/failjob/signalFile", conf);
+    signalTask(signalFile.toString(), conf);
     validateKillingSubprocesses(job, conf);
     // Checking the Job status
     assertEquals(job.getJobState(), JobStatus.FAILED);
@@ -126,9 +130,8 @@
     conf.setJobName("testsucceedjobsubprocesses");
     conf.setMapperClass(MapperWithChildren.class);
 
-    scriptDir = new Path(TEST_ROOT_DIR + "/script");
     RunningJob job = runJobAndSetProcessHandle(jt, conf);
-    signalTask(TEST_ROOT_DIR + "/succeedjob/signalFile", conf);
+    signalTask(signalFile.toString(), conf);
     validateKillingSubprocesses(job, conf);
     // Checking the Job status
     assertEquals(job.getJobState(), JobStatus.SUCCEEDED);
@@ -150,7 +153,6 @@
     }
 
     pid = null;
-    scriptDirName = scriptDir.toUri().getPath();
     jobClient = new JobClient(conf);
     
     // get the taskAttemptID of the map task and use it to get the pid
@@ -288,7 +290,9 @@
 
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(0);
-    conf.set("test.build.data", TEST_ROOT_DIR);
+
+    conf.set("mapred.child.java.opts", conf.get("mapred.child.java.opts") +
+                                  " -Dtest.build.data=" + BASE_TEST_ROOT_DIR);
 
     return UtilsForTests.runJob(conf, inDir, outDir);
   }
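The hunk above forwards test.build.data to the task JVM as a -D system property appended to mapred.child.java.opts, rather than setting it as a key in the JobConf. A hedged sketch of the same append done null-safely, under the assumption that mapred.child.java.opts could be unset in some configurations (helper name invented, not part of the patch):

    import org.apache.hadoop.mapred.JobConf;

    // Illustrative only: fall back to an empty string if the child opts have
    // no configured value, so the result never contains the literal "null".
    class ChildOptsHelper {
      static void forwardTestBuildData(JobConf conf, String dir) {
        String opts = conf.get("mapred.child.java.opts", "");
        conf.set("mapred.child.java.opts", opts + " -Dtest.build.data=" + dir);
      }
    }

The committed change concatenates the existing value directly, which works when the property already has a configured default (typically -Xmx200m).
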
@@ -346,18 +350,17 @@
   private static void runChildren(JobConf conf) throws IOException {
     if (ProcessTree.isSetsidAvailable) {
       FileSystem fs = FileSystem.getLocal(conf);
-      TEST_ROOT_DIR = new Path(conf.get("test.build.data")).toUri().getPath();
-      scriptDir = new Path(TEST_ROOT_DIR + "/script");
+
       if(fs.exists(scriptDir)){
         fs.delete(scriptDir, true);
       }
       // create shell script
       Random rm = new Random();
-      Path scriptPath = new Path(scriptDir, "_shellScript_" + rm.nextInt()
+      Path scriptPath = new Path(scriptDirName, "_shellScript_" + rm.nextInt()
         + ".sh");
       String shellScript = scriptPath.toString();
       String script =
-        "echo $$ > " + scriptDir.toString() + "/childPidFile" + "$1\n" +
+        "echo $$ > " + scriptDirName + "/childPidFile" + "$1\n" +
         "echo hello\n" +
         "trap 'echo got SIGTERM' 15 \n" +
         "if [ $1 != 0 ]\nthen\n" +
@@ -375,17 +378,17 @@
       Runtime.getRuntime()
           .exec(shellScript + " " + numLevelsOfSubProcesses);
     
-      String childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDir
+      String childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDirName
           + "/childPidFile" + 0);
       while (childPid == null) {
-        LOG.warn(scriptDir + "/childPidFile" + 0 + " is null; Sleeping...");
+        LOG.warn(scriptDirName + "/childPidFile" + 0 + " is null; Sleeping...");
         try {
           Thread.sleep(500);
         } catch (InterruptedException ie) {
           LOG.warn("sleep is interrupted:" + ie);
           break;
         }
-        childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDir
+        childPid = TestProcfsBasedProcessTree.getPidFromPidFile(scriptDirName
             + "/childPidFile" + 0);
       }
     }
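In the hunk above the parent waits for the shell script to report its pid: getPidFromPidFile(...) returns null until the child has written scriptDirName + "/childPidFile0", so the loop sleeps and retries. A self-contained sketch of that wait-and-read pattern for a plain local pid file (hypothetical helper, not part of the patch):

    import java.io.BufferedReader;
    import java.io.File;
    import java.io.FileReader;
    import java.io.IOException;

    // Illustrative helper: poll until the pid file exists and is non-empty,
    // then return its first line (the shell wrote it with "echo $$ > file").
    class PidFilePoller {
      static String waitForPid(String pidFileName, long sleepMillis)
          throws IOException {
        File pidFile = new File(pidFileName);
        while (!pidFile.exists() || pidFile.length() == 0) {
          try {
            Thread.sleep(sleepMillis);
          } catch (InterruptedException ie) {
            Thread.currentThread().interrupt();
            return null;                       // give up if interrupted
          }
        }
        BufferedReader in = new BufferedReader(new FileReader(pidFile));
        try {
          return in.readLine();
        } finally {
          in.close();
        }
      }
    }
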
@@ -397,9 +400,9 @@
   static class MapperWithChildren extends MapReduceBase implements
   Mapper<WritableComparable, Writable, WritableComparable, Writable> {
     FileSystem fs = null;
-    final Path signal = new Path(TEST_ROOT_DIR + "/script/signalFile");
     public void configure(JobConf conf) {
       try {
+        fs = FileSystem.getLocal(conf);
         runChildren(conf);
       } catch (Exception e) {
         LOG.warn("Exception in configure: " +
@@ -411,18 +414,16 @@
     public void map(WritableComparable key, Writable value,
         OutputCollector<WritableComparable, Writable> out, Reporter reporter)
         throws IOException {
-      if (fs != null) {
-        while (!fs.exists(signal)) {// wait for signal file creation
-          try {
-            reporter.progress();
-            synchronized (this) {
-              this.wait(1000);
-            }
-          } catch (InterruptedException ie) {
-            System.out.println("Interrupted while the map was waiting for "
-                               + " the signal.");
-            break;
+      while (!fs.exists(signalFile)) {// wait for signal file creation
+        try {
+          reporter.progress();
+          synchronized (this) {
+            this.wait(1000);
           }
+        } catch (InterruptedException ie) {
+          System.out.println("Interrupted while the map was waiting for "
+              + " the signal.");
+          break;
         }
       }
     }
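With fs now initialized in configure(), the map() above simply blocks until the static signalFile appears, calling reporter.progress() on each iteration so the framework's task timeout does not kill the attempt while it waits. A hedged, self-contained sketch of that wait pattern (hypothetical helper class, not from the patch):

    import java.io.IOException;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.mapred.Reporter;

    // Illustrative helper: block until the signal file exists, pinging the
    // Reporter so the task attempt is not timed out as inactive.
    class SignalWait {
      static void waitForSignal(FileSystem fs, Path signal, Reporter reporter)
          throws IOException {
        while (!fs.exists(signal)) {
          reporter.progress();                 // keep the attempt alive
          try {
            Thread.sleep(1000);
          } catch (InterruptedException ie) {
            break;                             // stop waiting if interrupted
          }
        }
      }
    }

The same wait loop appears again in the next hunk, in the mapper whose map() then throws to make the job fail.
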
@@ -461,18 +462,16 @@
     public void map(WritableComparable key, Writable value,
         OutputCollector<WritableComparable, Writable> out, Reporter reporter)
         throws IOException {
-      if (fs != null) {
-        while (!fs.exists(signal)) {// wait for signal file creation
-          try {
-            reporter.progress();
-            synchronized (this) {
-              this.wait(1000);
-            }
-          } catch (InterruptedException ie) {
-            System.out.println("Interrupted while the map was waiting for "
-                               + " the signal.");
-            break;
+      while (!fs.exists(signalFile)) {// wait for signal file creation
+        try {
+          reporter.progress();
+          synchronized (this) {
+            this.wait(1000);
           }
+        } catch (InterruptedException ie) {
+          System.out.println("Interrupted while the map was waiting for "
+              + " the signal.");
+          break;
         }
       }
       throw new RuntimeException("failing map");