Posted to mapreduce-commits@hadoop.apache.org by ac...@apache.org on 2009/08/17 05:56:30 UTC

svn commit: r804844 - in /hadoop/mapreduce/trunk: ./ src/contrib/streaming/src/test/org/apache/hadoop/streaming/ src/docs/src/documentation/content/xdocs/ src/java/org/apache/hadoop/mapred/ src/test/mapred/org/apache/hadoop/mapred/ src/test/mapred/org/...

Author: acmurthy
Date: Mon Aug 17 03:56:29 2009
New Revision: 804844

URL: http://svn.apache.org/viewvc?rev=804844&view=rev
Log:
MAPREDUCE-478. Allow map and reduce jvm parameters, environment variables and ulimit to be set separately.

Added:
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java
Modified:
    hadoop/mapreduce/trunk/CHANGES.txt
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
    hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
    hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java
    hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java
    hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java

Modified: hadoop/mapreduce/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/CHANGES.txt?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/CHANGES.txt (original)
+++ hadoop/mapreduce/trunk/CHANGES.txt Mon Aug 17 03:56:29 2009
@@ -196,6 +196,20 @@
     intermediate outputs and log files on tasktrackers.
     (Vinod Kumar Vavilapalli via yhemanth)
     
+    MAPREDUCE-478. Allow map and reduce jvm parameters, environment variables
+    and ulimit to be set separately.
+    Configuration changes:
+      add mapred.map.child.java.opts
+      add mapred.reduce.child.java.opts
+      add mapred.map.child.env
+      add mapred.reduce.child.env
+      add mapred.map.child.ulimit
+      add mapred.reduce.child.ulimit
+      deprecated mapred.child.java.opts
+      deprecated mapred.child.env
+      deprecated mapred.child.ulimit
+    (acmurthy)
+
   BUG FIXES
 
     MAPREDUCE-878. Rename fair scheduler design doc to 
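
For illustration only (not part of the patch), a minimal sketch of how a job could use the new per-task-type keys; the class name and the heap, ulimit and environment values below are assumptions, not defaults:

    import org.apache.hadoop.mapred.JobConf;

    public class PerTaskSettingsSketch {
      public static void main(String[] args) {
        JobConf conf = new JobConf();
        // Per-task-type keys introduced by MAPREDUCE-478.
        conf.set(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, "-Xmx512M");
        conf.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, "-Xmx1024M");
        conf.set(JobConf.MAPRED_MAP_TASK_ENV, "LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp");
        // ulimit is in KB and must cover the -Xmx above; 2097152 KB = 2 GB.
        conf.set(JobConf.MAPRED_REDUCE_TASK_ULIMIT, "2097152");
        // The old mapred.child.* keys remain as deprecated fallbacks that apply
        // only when the corresponding per-task-type key is unset.
      }
    }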

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestMultipleCachefiles.java Mon Aug 17 03:56:29 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputLogFilter;
 /**
@@ -84,9 +85,18 @@
           "-jobconf", strNamenode,
           "-jobconf", strJobtracker,
           "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", "mapred.child.java.opts=-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-                      "-Dbuild.test=" + System.getProperty("build.test") + " " +
-                      conf.get("mapred.child.java.opts",""),
+          "-jobconf", 
+            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
+              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+              "-Dbuild.test=" + System.getProperty("build.test") + " " +
+              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+          "-jobconf", 
+            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
+              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+              "-Dbuild.test=" + System.getProperty("build.test") + " " +
+              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
           "-cacheFile", fileSys.getUri() + CACHE_FILE + "#" + mapString,
           "-cacheFile", fileSys.getUri() + CACHE_FILE_2 + "#" + mapString2
         };

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestSymLink.java Mon Aug 17 03:56:29 2009
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.OutputLogFilter;
 /**
@@ -80,9 +81,18 @@
           "-jobconf", strNamenode,
           "-jobconf", strJobtracker,
           "-jobconf", "stream.tmpdir="+System.getProperty("test.build.data","/tmp"),
-          "-jobconf", "mapred.child.java.opts=-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
-                      "-Dbuild.test=" + System.getProperty("build.test") + " " +
-                      conf.get("mapred.child.java.opts",""),
+          "-jobconf", 
+            JobConf.MAPRED_MAP_TASK_JAVA_OPTS + "=" +
+              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+              "-Dbuild.test=" + System.getProperty("build.test") + " " +
+              conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
+          "-jobconf", 
+            JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + "=" +
+              "-Dcontrib.name=" + System.getProperty("contrib.name") + " " +
+              "-Dbuild.test=" + System.getProperty("build.test") + " " +
+              conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                       conf.get(JobConf.MAPRED_TASK_JAVA_OPTS, "")),
           "-cacheFile", fileSys.getUri() + CACHE_FILE + "#testlink"
         };
 

Modified: hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java (original)
+++ hadoop/mapreduce/trunk/src/contrib/streaming/src/test/org/apache/hadoop/streaming/TestUlimit.java Mon Aug 17 03:56:29 2009
@@ -24,6 +24,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TestMiniMRWithDFS;
 import org.apache.hadoop.util.StringUtils;
@@ -55,7 +56,7 @@
       "-reducer", "org.apache.hadoop.mapred.lib.IdentityReducer",
       "-numReduceTasks", "0",
       "-jobconf", "mapred.map.tasks=1",
-      "-jobconf", "mapred.child.ulimit=" + memLimit,
+      "-jobconf", JobConf.MAPRED_MAP_TASK_ULIMIT + "=" + memLimit,
       "-jobconf", "mapred.job.tracker=" + "localhost:" +
                                            mr.getJobTrackerPort(),
       "-jobconf", "fs.default.name=" + "hdfs://localhost:" 

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/cluster_setup.xml Mon Aug 17 03:56:29 2009
@@ -392,10 +392,18 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.child.java.opts</td>
+                    <td>mapred.map.child.java.opts</td>
                     <td>-Xmx512M</td>
                     <td>
-                      Larger heap-size for child jvms of maps/reduces. 
+                      Larger heap-size for child jvms of maps. 
+                    </td>
+                  </tr>
+                  <tr>
+                    <td>conf/mapred-site.xml</td>
+                    <td>mapred.reduce.child.java.opts</td>
+                    <td>-Xmx512M</td>
+                    <td>
+                      Larger heap-size for child jvms of reduces. 
                     </td>
                   </tr>
                   <tr>
@@ -465,9 +473,17 @@
                   </tr>
                   <tr>
                     <td>conf/mapred-site.xml</td>
-                    <td>mapred.child.java.opts</td>
+                    <td>mapred.map.child.java.opts</td>
+                    <td>-Xmx512M</td>
+                    <td>
+                      Larger heap-size for child jvms of maps. 
+                    </td>
+                  </tr>
+                  <tr>
+                    <td>conf/mapred-site.xml</td>
+                    <td>mapred.reduce.child.java.opts</td>
                     <td>-Xmx1024M</td>
-                    <td>Larger heap-size for child jvms of maps/reduces.</td>
+                    <td>Larger heap-size for child jvms of reduces.</td>
                   </tr>
                 </table>
               </li>
@@ -477,14 +493,14 @@
         <title> Memory management</title>
         <p>Users/admins can also specify the maximum virtual memory 
         of the launched child-task, and any sub-process it launches 
-        recursively, using <code>mapred.child.ulimit</code>. Note that
-        the value set here is a per process limit.
-        The value for <code>mapred.child.ulimit</code> should be specified 
-        in kilo bytes (KB). And also the value must be greater than
+        recursively, using <code>mapred.{map|reduce}.child.ulimit</code>. Note 
+        that the value set here is a per process limit.
+        The value for <code>mapred.{map|reduce}.child.ulimit</code> should be 
+        specified in kilobytes (KB). The value must also be greater than
         or equal to the -Xmx passed to JavaVM, else the VM might not start. 
         </p>
         
-        <p>Note: <code>mapred.child.java.opts</code> are used only for 
+        <p>Note: <code>mapred.{map|reduce}.child.java.opts</code> are used only for 
         configuring the launched child tasks from task tracker. Configuring 
         the memory options for daemons is documented in 
         <a href="cluster_setup.html#Configuring+the+Environment+of+the+Hadoop+Daemons">
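
As a worked example of the sizing rule above (numbers are illustrative): a map task launched with -Xmx512M needs mapred.map.child.ulimit of at least 512 * 1024 = 524288 KB; a value such as 1048576 KB (1 GB) leaves headroom for the JVM's own non-heap memory. Since the limit is per process, each sub-process the task launches gets the same cap rather than sharing it.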

Modified: hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml (original)
+++ hadoop/mapreduce/trunk/src/docs/src/documentation/content/xdocs/mapred_tutorial.xml Mon Aug 17 03:56:29 2009
@@ -1052,24 +1052,25 @@
         
         <p>The child-task inherits the environment of the parent 
         <code>TaskTracker</code>. The user can specify additional options to the
-        child-jvm via the <code>mapred.child.java.opts</code> configuration
-        parameter in the <code>JobConf</code> such as non-standard paths for the 
-        run-time linker to search shared libraries via 
+        child-jvm via the <code>mapred.{map|reduce}.child.java.opts</code> 
+        configuration parameter in the <code>JobConf</code> such as non-standard 
+        paths for the run-time linker to search shared libraries via 
         <code>-Djava.library.path=&lt;&gt;</code> etc. If the 
-        <code>mapred.child.java.opts</code> contains the symbol <em>@taskid@</em> 
-        it is interpolated with value of <code>taskid</code> of the Map/Reduce
-        task.</p>
+        <code>mapred.{map|reduce}.child.java.opts</code> parameter contains the 
+        symbol <em>@taskid@</em>, it is interpolated with the value of 
+        <code>taskid</code> of the map/reduce task.</p>
         
         <p>Here is an example with multiple arguments and substitutions, 
         showing jvm GC logging, and start of a passwordless JVM JMX agent so that
         it can connect with jconsole and the likes to watch child memory, 
         threads and get thread dumps. It also sets the maximum heap-size of the 
-        child jvm to 512MB and adds an additional path to the 
-        <code>java.library.path</code> of the child-jvm.</p>
+        map and reduce child jvm to 512MB &amp; 1024MB respectively. It also 
+        adds an additional path to the <code>java.library.path</code> of the 
+        child-jvm.</p>
 
         <p>
           <code>&lt;property&gt;</code><br/>
-          &nbsp;&nbsp;<code>&lt;name&gt;mapred.child.java.opts&lt;/name&gt;</code><br/>
+          &nbsp;&nbsp;<code>&lt;name&gt;mapred.map.child.java.opts&lt;/name&gt;</code><br/>
           &nbsp;&nbsp;<code>&lt;value&gt;</code><br/>
           &nbsp;&nbsp;&nbsp;&nbsp;<code>
                     -Xmx512M -Djava.library.path=/home/mycompany/lib
@@ -1081,19 +1082,33 @@
           <code>&lt;/property&gt;</code>
         </p>
         
+        <p>
+          <code>&lt;property&gt;</code><br/>
+          &nbsp;&nbsp;<code>&lt;name&gt;mapred.reduce.child.java.opts&lt;/name&gt;</code><br/>
+          &nbsp;&nbsp;<code>&lt;value&gt;</code><br/>
+          &nbsp;&nbsp;&nbsp;&nbsp;<code>
+                    -Xmx1024M -Djava.library.path=/home/mycompany/lib
+                    -verbose:gc -Xloggc:/tmp/@taskid@.gc</code><br/>
+          &nbsp;&nbsp;&nbsp;&nbsp;<code>
+                    -Dcom.sun.management.jmxremote.authenticate=false 
+                    -Dcom.sun.management.jmxremote.ssl=false</code><br/>
+          &nbsp;&nbsp;<code>&lt;/value&gt;</code><br/>
+          <code>&lt;/property&gt;</code>
+        </p>
+        
         <section>
         <title> Memory Management</title>
         <p>Users/admins can also specify the maximum virtual memory 
         of the launched child-task, and any sub-process it launches 
-        recursively, using <code>mapred.child.ulimit</code>. Note that
-        the value set here is a per process limit.
-        The value for <code>mapred.child.ulimit</code> should be specified 
-        in kilo bytes (KB). And also the value must be greater than
+        recursively, using <code>mapred.{map|reduce}.child.ulimit</code>. Note 
+        that the value set here is a per process limit.
+        The value for <code>mapred.{map|reduce}.child.ulimit</code> should be 
+        specified in kilobytes (KB). The value must also be greater than
         or equal to the -Xmx passed to JavaVM, else the VM might not start. 
         </p>
         
-        <p>Note: <code>mapred.child.java.opts</code> are used only for 
-        configuring the launched child tasks from task tracker. Configuring 
+        <p>Note: <code>mapred.{map|reduce}.child.java.opts</code> are used only 
+        for configuring the launched child tasks from task tracker. Configuring 
         the memory options for daemons is documented in 
         <a href="cluster_setup.html#Configuring+the+Environment+of+the+Hadoop+Daemons">
         cluster_setup.html </a></p>
@@ -1230,7 +1245,7 @@
                 shuffle.</td></tr>
             <tr><td>mapred.job.shuffle.input.buffer.percent</td><td>float</td>
                 <td>The percentage of memory- relative to the maximum heapsize
-                as typically specified in <code>mapred.child.java.opts</code>-
+                as typically specified in <code>mapred.reduce.child.java.opts</code>-
                 that can be allocated to storing map outputs during the
                 shuffle. Though some memory should be set aside for the
                 framework, in general it is advantageous to set this high
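
A worked example for the shuffle buffer row above (values assumed for illustration, not defaults): with mapred.reduce.child.java.opts set to -Xmx1024M and mapred.job.shuffle.input.buffer.percent set to 0.70, roughly 0.70 * 1024 MB, about 717 MB, of the reduce JVM heap can hold map outputs during the shuffle.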

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/JobConf.java Mon Aug 17 03:56:29 2009
@@ -158,6 +158,153 @@
       "mapred.job.reduce.memory.mb";
 
   /**
+   * Configuration key to set the java command line options for the child
+   * map and reduce tasks.
+   * 
+   * Java opts for the task tracker child processes.
+   * The following symbol, if present, will be interpolated: @taskid@. 
+   * It is replaced by current TaskID. Any other occurrences of '@' will go 
+   * unchanged.
+   * For example, to enable verbose gc logging to a file named for the taskid in
+   * /tmp and to set the heap maximum to be a gigabyte, pass a 'value' of:
+   *          -Xmx1024m -verbose:gc -Xloggc:/tmp/@taskid@.gc
+   * 
+   * The configuration variable {@link #MAPRED_TASK_ULIMIT} can be used to 
+   * control the maximum virtual memory of the child processes.
+   * 
+   * The configuration variable {@link #MAPRED_TASK_ENV} can be used to pass 
+   * other environment variables to the child processes.
+   * 
+   * @deprecated Use {@link #MAPRED_MAP_TASK_JAVA_OPTS} or 
+   *                 {@link #MAPRED_REDUCE_TASK_JAVA_OPTS}
+   */
+  @Deprecated
+  public static final String MAPRED_TASK_JAVA_OPTS = "mapred.child.java.opts";
+  
+  /**
+   * Configuration key to set the java command line options for the map tasks.
+   * 
+   * Java opts for the task tracker child map processes.
+   * The following symbol, if present, will be interpolated: @taskid@. 
+   * It is replaced by current TaskID. Any other occurrences of '@' will go 
+   * unchanged.
+   * For example, to enable verbose gc logging to a file named for the taskid in
+   * /tmp and to set the heap maximum to be a gigabyte, pass a 'value' of:
+   *          -Xmx1024m -verbose:gc -Xloggc:/tmp/@taskid@.gc
+   * 
+   * The configuration variable {@link #MAPRED_MAP_TASK_ULIMIT} can be used to 
+   * control the maximum virtual memory of the map processes.
+   * 
+   * The configuration variable {@link #MAPRED_MAP_TASK_ENV} can be used to pass 
+   * other environment variables to the map processes.
+   */
+  public static final String MAPRED_MAP_TASK_JAVA_OPTS = 
+    "mapred.map.child.java.opts";
+  
+  /**
+   * Configuration key to set the java command line options for the reduce tasks.
+   * 
+   * Java opts for the task tracker child reduce processes.
+   * The following symbol, if present, will be interpolated: @taskid@. 
+   * It is replaced by current TaskID. Any other occurrences of '@' will go 
+   * unchanged.
+   * For example, to enable verbose gc logging to a file named for the taskid in
+   * /tmp and to set the heap maximum to be a gigabyte, pass a 'value' of:
+   *          -Xmx1024m -verbose:gc -Xloggc:/tmp/@taskid@.gc
+   * 
+   * The configuration variable {@link #MAPRED_REDUCE_TASK_ULIMIT} can be used  
+   * to control the maximum virtual memory of the reduce processes.
+   * 
+   * The configuration variable {@link #MAPRED_REDUCE_TASK_ENV} can be used to 
+   * pass other environment variables to the reduce processes.
+   */
+  public static final String MAPRED_REDUCE_TASK_JAVA_OPTS = 
+    "mapred.reduce.child.java.opts";
+  
+  public static final String DEFAULT_MAPRED_TASK_JAVA_OPTS = "-Xmx200m";
+  
+  /**
+   * Configuration key to set the maximum virtual memory available to the child
+   * map and reduce tasks (in kilobytes).
+   * 
+   * Note: This must be greater than or equal to the -Xmx passed to the JavaVM
+   *       via {@link #MAPRED_TASK_JAVA_OPTS}, else the VM might not start.
+   * 
+   * @deprecated Use {@link #MAPRED_MAP_TASK_ULIMIT} or 
+   *                 {@link #MAPRED_REDUCE_TASK_ULIMIT}
+   */
+  @Deprecated
+  public static final String MAPRED_TASK_ULIMIT = "mapred.child.ulimit";
+
+  /**
+   * Configuration key to set the maximum virtual memory available to the
+   * map tasks (in kilobytes).
+   * 
+   * Note: This must be greater than or equal to the -Xmx passed to the JavaVM
+   *       via {@link #MAPRED_MAP_TASK_JAVA_OPTS}, else the VM might not start.
+   */
+  public static final String MAPRED_MAP_TASK_ULIMIT = "mapred.map.child.ulimit";
+  
+  /**
+   * Configuration key to set the maximum virtual memory available to the
+   * reduce tasks (in kilobytes).
+   * 
+   * Note: This must be greater than or equal to the -Xmx passed to the JavaVM
+   *       via {@link #MAPRED_REDUCE_TASK_JAVA_OPTS}, else the VM might not start.
+   */
+  public static final String MAPRED_REDUCE_TASK_ULIMIT =
+    "mapred.reduce.child.ulimit";
+
+  /**
+   * Configuration key to set the environment of the child map/reduce tasks.
+   * 
+   * The format of the value is <code>k1=v1,k2=v2</code>. Further it can 
+   * reference existing environment variables via <code>$key</code>.
+   * 
+   * Example:
+   * <ul>
+   *   <li> A=foo - This will set the env variable A to foo. </li>
+   *   <li> B=$X:c - This inherits the tasktracker's X env variable and appends ':c' to it. </li>
+   * </ul>
+   * 
+   * @deprecated Use {@link #MAPRED_MAP_TASK_ENV} or 
+   *                 {@link #MAPRED_REDUCE_TASK_ENV}
+   */
+  @Deprecated
+  public static final String MAPRED_TASK_ENV = "mapred.child.env";
+
+  /**
+   * Configuration key to set the environment of the
+   * map tasks.
+   * 
+   * The format of the value is <code>k1=v1,k2=v2</code>. Further it can 
+   * reference existing environment variables via <code>$key</code>.
+   * 
+   * Example:
+   * <ul>
+   *   <li> A=foo - This will set the env variable A to foo. </li>
+   *   <li> B=$X:c - This inherits the tasktracker's X env variable and appends ':c' to it. </li>
+   * </ul>
+   */
+  public static final String MAPRED_MAP_TASK_ENV = "mapred.map.child.env";
+  
+  /**
+   * Configuration key to set the environment of the
+   * reduce tasks.
+   * 
+   * The format of the value is <code>k1=v1,k2=v2</code>. Further it can 
+   * reference existing environment variables via <code>$key</code>.
+   * 
+   * Example:
+   * <ul>
+   *   <li> A=foo - This will set the env variable A to foo. </li>
+   *   <li> B=$X:c - This inherits the tasktracker's X env variable and appends ':c' to it. </li>
+   * </ul>
+   */
+  public static final String MAPRED_REDUCE_TASK_ENV =
+    "mapred.reduce.child.env";
+
+  /**
    * Construct a map/reduce job configuration.
    */
   public JobConf() {}
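
To make the documented k1=v1,k2=v2 format concrete, here is a small self-contained sketch (a simplified assumption, not the TaskTracker's actual implementation; the class name is illustrative) of resolving $NAME references against the parent environment. Unresolved references become empty, which matches the NEW_PATH=$NEW_PATH:/tmp case exercised in the tests further down:

    import java.util.HashMap;
    import java.util.Map;
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class ChildEnvFormatSketch {
      private static final Pattern VAR = Pattern.compile("\\$(\\w+)");

      // Parses "k1=v1,k2=v2", resolving $NAME against the parent environment.
      static Map<String, String> parse(String spec, Map<String, String> parentEnv) {
        Map<String, String> env = new HashMap<String, String>(parentEnv);
        if (spec == null || spec.length() == 0) {
          return env;
        }
        for (String entry : spec.split(",")) {
          int eq = entry.indexOf('=');
          if (eq < 0) {
            continue; // skip malformed entries in this sketch
          }
          String key = entry.substring(0, eq);
          Matcher m = VAR.matcher(entry.substring(eq + 1));
          StringBuffer value = new StringBuffer();
          while (m.find()) {
            String ref = parentEnv.get(m.group(1));
            m.appendReplacement(value, Matcher.quoteReplacement(ref == null ? "" : ref));
          }
          m.appendTail(value);
          env.put(key, value.toString());
        }
        return env;
      }

      public static void main(String[] args) {
        Map<String, String> child =
            parse("MY_PATH=/tmp,PATH=$PATH:/tmp", System.getenv());
        // Prints the parent's PATH with ":/tmp" appended.
        System.out.println(child.get("PATH"));
      }
    }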

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/MapTaskRunner.java Mon Aug 17 03:56:29 2009
@@ -23,7 +23,7 @@
 
 /** Runs a map task. */
 class MapTaskRunner extends TaskRunner {
-
+  
   public MapTaskRunner(TaskInProgress task, TaskTracker tracker, JobConf conf) {
     super(task, tracker, conf);
   }
@@ -43,4 +43,23 @@
     LOG.info(getTask()+" done; removing files.");
     mapOutputFile.removeAll();
   }
+
+  @Override
+  public String getChildJavaOpts(JobConf jobConf, String defaultValue) {
+    return jobConf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                       super.getChildJavaOpts(jobConf, 
+                           JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS));
+  }
+  
+  @Override
+  public int getChildUlimit(JobConf jobConf) {
+    return jobConf.getInt(JobConf.MAPRED_MAP_TASK_ULIMIT, 
+                          super.getChildUlimit(jobConf));
+  }
+
+  @Override
+  public String getChildEnv(JobConf jobConf) {
+    return jobConf.get(JobConf.MAPRED_MAP_TASK_ENV, super.getChildEnv(jobConf));
+  }
+
 }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/ReduceTaskRunner.java Mon Aug 17 03:56:29 2009
@@ -23,7 +23,7 @@
 
 /** Runs a reduce task. */
 class ReduceTaskRunner extends TaskRunner {
-  
+
   public ReduceTaskRunner(TaskInProgress task, TaskTracker tracker, 
                           JobConf conf) throws IOException {
     
@@ -48,4 +48,24 @@
     getTask().getProgress().setStatus("closed");
     mapOutputFile.removeAll();
   }
+
+  @Override
+  public String getChildJavaOpts(JobConf jobConf, String defaultValue) {
+    return jobConf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                       super.getChildJavaOpts(jobConf, 
+                           JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS));
+  }
+ 
+  @Override
+  public int getChildUlimit(JobConf jobConf) {
+    return jobConf.getInt(JobConf.MAPRED_REDUCE_TASK_ULIMIT, 
+                          super.getChildUlimit(jobConf));
+  }
+
+  @Override
+  public String getChildEnv(JobConf jobConf) {
+    return jobConf.get(JobConf.MAPRED_REDUCE_TASK_ENV, 
+                       super.getChildEnv(jobConf));
+  }
+
 }

Modified: hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java (original)
+++ hadoop/mapreduce/trunk/src/java/org/apache/hadoop/mapred/TaskRunner.java Mon Aug 17 03:56:29 2009
@@ -112,6 +112,48 @@
     return str.toString();
   }
   
+
+  /**
+   * Get the java command line options for the child map/reduce tasks.
+   * @param jobConf job configuration
+   * @param defaultValue default value
+   * @return the java command line options for child map/reduce tasks
+   * @deprecated Use command line options specific to map or reduce tasks set 
+   *             via {@link JobConf#MAPRED_MAP_TASK_JAVA_OPTS} or 
+   *             {@link JobConf#MAPRED_REDUCE_TASK_JAVA_OPTS}
+   */
+  @Deprecated
+  public String getChildJavaOpts(JobConf jobConf, String defaultValue) {
+    return jobConf.get(JobConf.MAPRED_TASK_JAVA_OPTS, defaultValue);
+  }
+  
+  /**
+   * Get the maximum virtual memory of the child map/reduce tasks.
+   * @param jobConf job configuration
+   * @return the maximum virtual memory of the child task or <code>-1</code> if
+   *         none is specified
+   * @deprecated Use limits specific to the map or reduce tasks set via
+   *             {@link JobConf#MAPRED_MAP_TASK_ULIMIT} or
+   *             {@link JobConf#MAPRED_REDUCE_TASK_ULIMIT} 
+   */
+  @Deprecated
+  public int getChildUlimit(JobConf jobConf) {
+    return jobConf.getInt(JobConf.MAPRED_TASK_ULIMIT, -1);
+  }
+  
+  /**
+   * Get the environment variables for the child map/reduce tasks.
+   * @param jobConf job configuration
+   * @return the environment variables for the child map/reduce tasks or
+   *         <code>null</code> if unspecified
+   * @deprecated Use environment variables specific to the map or reduce tasks
+   *             set via {@link JobConf#MAPRED_MAP_TASK_ENV} or
+   *             {@link JobConf#MAPRED_REDUCE_TASK_ENV}
+   */
+  @Deprecated
+  public String getChildEnv(JobConf jobConf) {
+    return jobConf.get(JobConf.MAPRED_TASK_ENV);
+  }
+
   @Override
   public final void run() {
     String errorInfo = "Child Error";
@@ -266,7 +308,7 @@
    * @return
    */
   private List<String> getVMSetupCmd() {
-    String[] ulimitCmd = Shell.getUlimitMemoryCommand(conf);
+    String[] ulimitCmd = Shell.getUlimitMemoryCommand(getChildUlimit(conf));
     List<String> setup = null;
     if (ulimitCmd != null) {
       setup = new ArrayList<String>();
@@ -296,8 +338,8 @@
 
     // Add child (task) java-vm options.
     //
-    // The following symbols if present in mapred.child.java.opts value are
-    // replaced:
+    // The following symbols if present in mapred.{map|reduce}.child.java.opts 
+    // value are replaced:
     // + @taskid@ is interpolated with value of TaskID.
     // Other occurrences of @ will not be altered.
     //
@@ -308,13 +350,22 @@
     //
     //  <property>
     //    <name>mapred.child.java.opts</name>
-    //    <value>-verbose:gc -Xloggc:/tmp/@taskid@.gc \
+    //    <value>-Xmx512M -verbose:gc -Xloggc:/tmp/@taskid@.gc \
+    //           -Dcom.sun.management.jmxremote.authenticate=false \
+    //           -Dcom.sun.management.jmxremote.ssl=false \
+    //    </value>
+    //  </property>
+    //
+    //  <property>
+    //    <name>mapred.reduce.child.java.opts</name>
+    //    <value>-Xmx1024M -verbose:gc -Xloggc:/tmp/@taskid@.gc \
     //           -Dcom.sun.management.jmxremote.authenticate=false \
     //           -Dcom.sun.management.jmxremote.ssl=false \
     //    </value>
     //  </property>
     //
-    String javaOpts = conf.get("mapred.child.java.opts", "-Xmx200m");
+    String javaOpts = getChildJavaOpts(conf, 
+                                       JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS);
     javaOpts = javaOpts.replace("@taskid@", taskid.toString());
     String [] javaOptsSplit = javaOpts.split(" ");
     
@@ -325,7 +376,7 @@
     // 2. We also add the 'cwd' of the task to it's java.library.path to help 
     //    users distribute native libraries via the DistributedCache.
     // 3. The user can also specify extra paths to be added to the 
-    //    java.library.path via mapred.child.java.opts.
+    //    java.library.path via mapred.{map|reduce}.child.java.opts.
     //
     String libraryPath = System.getProperty("java.library.path");
     if (libraryPath == null) {
@@ -436,7 +487,7 @@
    * @return
    * @throws Throwable
    */
-  private static String getVMEnvironment(String errorInfo, File workDir, JobConf conf,
+  private String getVMEnvironment(String errorInfo, File workDir, JobConf conf,
       Map<String, String> env)
       throws Throwable {
     StringBuffer ldLibraryPath = new StringBuffer();
@@ -450,7 +501,7 @@
     env.put("LD_LIBRARY_PATH", ldLibraryPath.toString());
     
     // add the env variables passed by the user
-    String mapredChildEnv = conf.get("mapred.child.env");
+    String mapredChildEnv = getChildEnv(conf);
     if (mapredChildEnv != null && mapredChildEnv.length() > 0) {
       String childEnvs[] = mapredChildEnv.split(",");
       for (String cEnv : childEnvs) {
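
For reference, a compact sketch of the lookup order the map-side runner now follows (per-task key first, the deprecated generic key next, the built-in default last), combined with the @taskid@ interpolation performed above; the class and method names are illustrative, not new API:

    import org.apache.hadoop.mapred.JobConf;

    public class ChildOptsResolutionSketch {
      // Mirrors MapTaskRunner.getChildJavaOpts(): the map-specific key wins,
      // the deprecated mapred.child.java.opts is the fallback, then the default.
      static String resolveMapJavaOpts(JobConf conf, String taskid) {
        String opts = conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS,
            conf.get(JobConf.MAPRED_TASK_JAVA_OPTS,
                     JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS));
        // Only @taskid@ is interpolated; other '@' characters pass through.
        return opts.replace("@taskid@", taskid);
      }

      public static void main(String[] args) {
        JobConf conf = new JobConf();
        conf.set(JobConf.MAPRED_MAP_TASK_JAVA_OPTS,
                 "-Xmx512M -verbose:gc -Xloggc:/tmp/@taskid@.gc");
        System.out.println(
            resolveMapJavaOpts(conf, "attempt_200908170356_0001_m_000000_0"));
      }
    }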

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestJobExecutionAsDifferentUser.java Mon Aug 17 03:56:29 2009
@@ -78,7 +78,7 @@
     Path inDir = new Path("input1");
     Path outDir = new Path("output1");
     try {
-      childTask.runTestTaskEnv(getClusterConf(), inDir, outDir);
+      childTask.runTestTaskEnv(getClusterConf(), inDir, outDir, false);
     } catch (IOException e) {
       fail("IOException thrown while running enviroment test."
           + e.getMessage());

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestKillSubProcesses.java Mon Aug 17 03:56:29 2009
@@ -291,8 +291,14 @@
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(0);
 
-    conf.set("mapred.child.java.opts", conf.get("mapred.child.java.opts") +
-                                  " -Dtest.build.data=" + BASE_TEST_ROOT_DIR);
+    conf.set(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+             conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, 
+                      conf.get(JobConf.MAPRED_TASK_JAVA_OPTS)) +
+             " -Dtest.build.data=" + BASE_TEST_ROOT_DIR);
+    conf.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+             conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, 
+                      conf.get(JobConf.MAPRED_TASK_JAVA_OPTS)) +
+             " -Dtest.build.data=" + BASE_TEST_ROOT_DIR);
 
     return UtilsForTests.runJob(conf, inDir, outDir);
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestMiniMRChildTask.java Mon Aug 17 03:56:29 2009
@@ -18,6 +18,8 @@
 package org.apache.hadoop.mapred;
 
 import java.io.*;
+import java.util.Iterator;
+
 import junit.framework.TestCase;
 
 import org.apache.commons.logging.Log;
@@ -42,6 +44,11 @@
   private static final Log LOG =
     LogFactory.getLog(TestMiniMRChildTask.class.getName());
 
+  private final static String OLD_CONFIGS = "test.old.configs";
+  private final static String TASK_OPTS_VAL = "-Xmx200m";
+  private final static String MAP_OPTS_VAL = "-Xmx200m";
+  private final static String REDUCE_OPTS_VAL = "-Xmx300m";
+
   private MiniMRCluster mr;
   private MiniDFSCluster dfs;
   private FileSystem fileSys;
@@ -85,7 +92,8 @@
 
   // configure a job
   private void configure(JobConf conf, Path inDir, Path outDir, String input,
-                         Class<? extends Mapper> map) 
+                         Class<? extends Mapper> map, 
+                         Class<? extends Reducer> reduce) 
   throws IOException {
     // set up the input file system and write input text.
     FileSystem inFs = inDir.getFileSystem(conf);
@@ -104,7 +112,7 @@
     // configure the mapred Job which creates a tempfile in map.
     conf.setJobName("testmap");
     conf.setMapperClass(map);
-    conf.setReducerClass(IdentityReducer.class);
+    conf.setReducerClass(reduce);
     conf.setNumMapTasks(1);
     conf.setNumReduceTasks(0);
     FileInputFormat.setInputPaths(conf, inDir);
@@ -127,7 +135,8 @@
                          Path outDir,
                          String input)
   throws IOException {
-    configure(conf, inDir, outDir, input, MapClass.class);
+    configure(conf, inDir, outDir, input, 
+              MapClass.class, IdentityReducer.class);
 
     FileSystem outFs = outDir.getFileSystem(conf);
     
@@ -147,16 +156,52 @@
     outFs.delete(outDir, true);
   }
 
+  private static void checkEnv(String envName, String expValue, String mode) {
+    String envValue = System.getenv(envName); // may be null, checked below
+    if ("append".equals(mode)) {
+      if (envValue == null || !envValue.contains(":")) {
+        throw new RuntimeException("Missing env variable");
+      } else {
+        String parts[] = envValue.split(":");
+        // check if the value is appended
+        if (!parts[parts.length - 1].equals(expValue)) {
+          throw new RuntimeException("Wrong env variable in append mode");
+        }
+      }
+    } else {
+      if (envValue == null || !envValue.equals(expValue)) {
+        throw new RuntimeException("Wrong env variable in noappend mode");
+      }
+    }
+  }
+
   // Mappers that simply checks if the desired user env are present or not
   static class EnvCheckMapper extends MapReduceBase implements
       Mapper<WritableComparable, Writable, WritableComparable, Writable> {
-    private static String PATH;
     
-    public void map(WritableComparable key, Writable value,
-        OutputCollector<WritableComparable, Writable> out, Reporter reporter)
-        throws IOException {
+    public void configure(JobConf job) {
+      boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false);
+      if (oldConfigs) {
+        String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
+                      javaOpts);
+        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + 
+                     javaOpts, 
+                     javaOpts, TASK_OPTS_VAL);
+      } else {
+        String mapJavaOpts = job.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!", 
+                      mapJavaOpts);
+        assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + 
+                     mapJavaOpts, 
+                     mapJavaOpts, MAP_OPTS_VAL);
+      }
+
+      String path = job.get("path");
+      
       // check if the pwd is there in LD_LIBRARY_PATH
       String pwd = System.getenv("PWD");
+      
       assertTrue("LD doesnt contain pwd", 
                  System.getenv("LD_LIBRARY_PATH").contains(pwd));
       
@@ -170,34 +215,69 @@
       checkEnv("NEW_PATH", ":/tmp", "noappend");
       // check if X=$(tt's X var):/tmp for an old env variable inherited from 
       // the tt
-      checkEnv("PATH",  PATH + ":/tmp", "noappend");
+      checkEnv("PATH",  path + ":/tmp", "noappend");
     }
 
-    private void checkEnv(String envName, String expValue, String mode) 
-    throws IOException {
-      String envValue = System.getenv(envName).trim();
-      if ("append".equals(mode)) {
-        if (envValue == null || !envValue.contains(":")) {
-          throw new  IOException("Missing env variable");
-        } else {
-          String parts[] = envValue.split(":");
-          // check if the value is appended
-          if (!parts[parts.length - 1].equals(expValue)) {
-            throw new  IOException("Wrong env variable in append mode");
-          }
-        }
+    public void map(WritableComparable key, Writable value,
+                    OutputCollector<WritableComparable, Writable> out, 
+                    Reporter reporter)
+        throws IOException {
+    }
+  }
+
+  static class EnvCheckReducer extends MapReduceBase 
+  implements Reducer<WritableComparable, Writable, WritableComparable, Writable> {
+    
+    @Override
+    public void configure(JobConf job) {
+      boolean oldConfigs = job.getBoolean(OLD_CONFIGS, false);
+      if (oldConfigs) {
+        String javaOpts = job.get(JobConf.MAPRED_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
+                      javaOpts);
+        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + 
+                     javaOpts, 
+                     javaOpts, TASK_OPTS_VAL);
       } else {
-        if (envValue == null || !envValue.equals(expValue)) {
-          throw new  IOException("Wrong env variable in noappend mode");
-        }
+        String reduceJavaOpts = job.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!", 
+                      reduceJavaOpts);
+        assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " + 
+                     reduceJavaOpts, 
+                     reduceJavaOpts, REDUCE_OPTS_VAL);
       }
+
+      String path = job.get("path");
+      
+      // check if the pwd is there in LD_LIBRARY_PATH
+      String pwd = System.getenv("PWD");
+      
+      assertTrue("LD doesnt contain pwd", 
+                 System.getenv("LD_LIBRARY_PATH").contains(pwd));
+      
+      // check if X=$X:/abc works for LD_LIBRARY_PATH
+      checkEnv("LD_LIBRARY_PATH", "/tmp", "append");
+      // check if X=/tmp works for an already existing parameter
+      checkEnv("HOME", "/tmp", "noappend");
+      // check if X=/tmp for a new env variable
+      checkEnv("MY_PATH", "/tmp", "noappend");
+      // check if X=$X:/tmp works for a new env var and results into :/tmp
+      checkEnv("NEW_PATH", ":/tmp", "noappend");
+      // check if X=$(tt's X var):/tmp for an old env variable inherited from 
+      // the tt
+      checkEnv("PATH",  path + ":/tmp", "noappend");
+
     }
-    
-    public void configure(JobConf conf) {
-      PATH = conf.get("path");
+
+    @Override
+    public void reduce(WritableComparable key, Iterator<Writable> values,
+                       OutputCollector<WritableComparable, Writable> output, 
+                       Reporter reporter)
+        throws IOException {
     }
+    
   }
-
+  
   @Override
   public void setUp() {
     try {
@@ -269,7 +349,7 @@
       Path inDir = new Path("testing/wc/input1");
       Path outDir = new Path("testing/wc/output1");
       FileSystem outFs = outDir.getFileSystem(conf);
-      runTestTaskEnv(conf, inDir, outDir);
+      runTestTaskEnv(conf, inDir, outDir, false);
       outFs.delete(outDir, true);
     } catch(Exception e) {
       e.printStackTrace();
@@ -278,19 +358,60 @@
     }
   }
   
-  void runTestTaskEnv(JobConf conf, Path inDir, Path outDir) throws IOException {
+  /**
+   * Test to check whether user-set *old* (deprecated) env variables are
+   * reflected in the child processes. Mainly
+   *   - x=y (x can be an already existing env variable or a new variable)
+   *   - x=$x:y (replace $x with the current value of x)
+   */
+  public void testTaskOldEnv(){
+    try {
+      JobConf conf = mr.createJobConf();
+      // initialize input, output directories
+      Path inDir = new Path("testing/wc/input1");
+      Path outDir = new Path("testing/wc/output1");
+      FileSystem outFs = outDir.getFileSystem(conf);
+      runTestTaskEnv(conf, inDir, outDir, true);
+      outFs.delete(outDir, true);
+    } catch(Exception e) {
+      e.printStackTrace();
+      fail("Exception in testing child env");
+      tearDown();
+    }
+  }
+  
+  void runTestTaskEnv(JobConf conf, Path inDir, Path outDir, boolean oldConfigs) 
+  throws IOException {
     String input = "The input";
-    configure(conf, inDir, outDir, input, EnvCheckMapper.class);
+    configure(conf, inDir, outDir, input, 
+              EnvCheckMapper.class, EnvCheckReducer.class);
     // test 
     //  - new SET of new var (MY_PATH)
     //  - set of old var (HOME)
     //  - append to an old var from modified env (LD_LIBRARY_PATH)
     //  - append to an old var from tt's env (PATH)
     //  - append to a new var (NEW_PATH)
-    conf.set("mapred.child.env", 
-             "MY_PATH=/tmp,HOME=/tmp,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp,"
-             + "PATH=$PATH:/tmp,NEW_PATH=$NEW_PATH:/tmp");
+    String mapTaskEnvKey = JobConf.MAPRED_MAP_TASK_ENV;
+    String reduceTaskEnvKey = JobConf.MAPRED_REDUCE_TASK_ENV;
+    String mapTaskJavaOptsKey = JobConf.MAPRED_MAP_TASK_JAVA_OPTS;
+    String reduceTaskJavaOptsKey = JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS;
+    String mapTaskJavaOpts = MAP_OPTS_VAL;
+    String reduceTaskJavaOpts = REDUCE_OPTS_VAL;
+    conf.setBoolean(OLD_CONFIGS, oldConfigs);
+    if (oldConfigs) {
+      mapTaskEnvKey = reduceTaskEnvKey = JobConf.MAPRED_TASK_ENV;
+      mapTaskJavaOptsKey = reduceTaskJavaOptsKey = JobConf.MAPRED_TASK_JAVA_OPTS;
+      mapTaskJavaOpts = reduceTaskJavaOpts = TASK_OPTS_VAL;
+    }
+    conf.set(mapTaskEnvKey, 
+             "MY_PATH=/tmp,HOME=/tmp,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp," +
+             "PATH=$PATH:/tmp,NEW_PATH=$NEW_PATH:/tmp");
+    conf.set(reduceTaskEnvKey, 
+             "MY_PATH=/tmp,HOME=/tmp,LD_LIBRARY_PATH=$LD_LIBRARY_PATH:/tmp," +
+             "PATH=$PATH:/tmp,NEW_PATH=$NEW_PATH:/tmp");
     conf.set("path", System.getenv("PATH"));
+    conf.set(mapTaskJavaOptsKey, mapTaskJavaOpts);
+    conf.set(reduceTaskJavaOptsKey, reduceTaskJavaOpts);
     RunningJob job = JobClient.runJob(conf);
     assertTrue("The environment checker job failed.", job.isSuccessful());
   }

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetch.java Mon Aug 17 03:56:29 2009
@@ -35,7 +35,7 @@
     JobConf job = mrCluster.createJobConf();
     job.set("mapred.job.reduce.input.buffer.percent", "0.0");
     job.setNumMapTasks(MAP_TASKS);
-    job.set("mapred.child.java.opts", "-Xmx128m");
+    job.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, "-Xmx128m");
     job.setInt("mapred.job.reduce.total.mem.bytes", 128 << 20);
     job.set("mapred.job.shuffle.input.buffer.percent", "0.14");
     job.setInt("io.sort.factor", 2);

Modified: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java?rev=804844&r1=804843&r2=804844&view=diff
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java (original)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapred/TestReduceFetchFromPartialMem.java Mon Aug 17 03:56:29 2009
@@ -84,7 +84,7 @@
     job.set("mapred.job.reduce.input.buffer.percent", "1.0");
     job.setInt("mapred.reduce.parallel.copies", 1);
     job.setInt("io.sort.mb", 10);
-    job.set("mapred.child.java.opts", "-Xmx128m");
+    job.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, "-Xmx128m");
     job.setInt("mapred.job.reduce.total.mem.bytes", 128 << 20);
     job.set("mapred.job.shuffle.input.buffer.percent", "0.14");
     job.set("mapred.job.shuffle.merge.percent", "1.0");

Added: hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java?rev=804844&view=auto
==============================================================================
--- hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java (added)
+++ hadoop/mapreduce/trunk/src/test/mapred/org/apache/hadoop/mapreduce/TestChild.java Mon Aug 17 03:56:29 2009
@@ -0,0 +1,142 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce;
+
+import java.io.File;
+import java.io.IOException;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileStatus;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.PathFilter;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.HadoopTestCase;
+import org.apache.hadoop.mapred.JobConf;
+
+public class TestChild extends HadoopTestCase {
+  private static String TEST_ROOT_DIR =
+    new File(System.getProperty("test.build.data","/tmp"))
+    .toURI().toString().replace(' ', '+');
+  private final Path inDir = new Path(TEST_ROOT_DIR, "./wc/input");
+  private final Path outDir = new Path(TEST_ROOT_DIR, "./wc/output");
+  
+  private final static String OLD_CONFIGS = "test.old.configs";
+  private final static String TASK_OPTS_VAL = "-Xmx200m";
+  private final static String MAP_OPTS_VAL = "-Xmx200m";
+  private final static String REDUCE_OPTS_VAL = "-Xmx300m";
+  
+  public TestChild() throws IOException {
+    super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.LOCAL_FS, 2, 2);
+  }
+
+  static class MyMapper extends Mapper<LongWritable, Text, LongWritable, Text> {
+
+    @Override
+    protected void setup(Context context) throws IOException,
+        InterruptedException {
+      Configuration conf = context.getConfiguration();
+      boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
+      if (oldConfigs) {
+        String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
+                      javaOpts);
+        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + 
+                     javaOpts, 
+                     javaOpts, TASK_OPTS_VAL);
+      } else {
+        String mapJavaOpts = conf.get(JobConf.MAPRED_MAP_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " is null!", 
+                      mapJavaOpts);
+        assertEquals(JobConf.MAPRED_MAP_TASK_JAVA_OPTS + " has value of: " + 
+                     mapJavaOpts, 
+                     mapJavaOpts, MAP_OPTS_VAL);
+      }
+    }
+  }
+  
+  static class MyReducer 
+  extends Reducer<LongWritable, Text, LongWritable, Text> {
+
+    @Override
+    protected void setup(Context context)
+        throws IOException, InterruptedException {
+      Configuration conf = context.getConfiguration();
+      boolean oldConfigs = conf.getBoolean(OLD_CONFIGS, false);
+      if (oldConfigs) {
+        String javaOpts = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_TASK_JAVA_OPTS + " is null!", 
+                      javaOpts);
+        assertEquals(JobConf.MAPRED_TASK_JAVA_OPTS + " has value of: " + 
+                     javaOpts, 
+                     javaOpts, TASK_OPTS_VAL);
+      } else {
+        String reduceJavaOpts = conf.get(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS);
+        assertNotNull(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " is null!", 
+                      reduceJavaOpts);
+        assertEquals(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS + " has value of: " + 
+                     reduceJavaOpts, 
+                     reduceJavaOpts, REDUCE_OPTS_VAL);
+      }
+    }
+  }
+  
+  private Job submitAndValidateJob(JobConf conf, int numMaps, int numReds, 
+                                   boolean oldConfigs) 
+      throws IOException, InterruptedException, ClassNotFoundException {
+    conf.setBoolean(OLD_CONFIGS, oldConfigs);
+    if (oldConfigs) {
+      conf.set(JobConf.MAPRED_TASK_JAVA_OPTS, TASK_OPTS_VAL);
+    } else {
+      conf.set(JobConf.MAPRED_MAP_TASK_JAVA_OPTS, MAP_OPTS_VAL);
+      conf.set(JobConf.MAPRED_REDUCE_TASK_JAVA_OPTS, REDUCE_OPTS_VAL);
+    }
+    
+    Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 
+                numMaps, numReds);
+    job.setMapperClass(MyMapper.class);
+    job.setReducerClass(MyReducer.class);
+    job.waitForCompletion(true);
+    assertTrue(job.isSuccessful());
+
+    // Check output directory
+    FileSystem fs = FileSystem.get(conf);
+    assertTrue("Job output directory doesn't exist!", fs.exists(outDir));
+    FileStatus[] list = fs.listStatus(outDir, new OutputFilter());
+    int numPartFiles = numReds == 0 ? numMaps : numReds;
+    assertTrue("Number of part-files is " + list.length + " and not "
+        + numPartFiles, list.length == numPartFiles);
+    return job;
+  }
+  
+  public void testChild() throws Exception {
+    try {
+      submitAndValidateJob(createJobConf(), 1, 1, true);
+      submitAndValidateJob(createJobConf(), 1, 1, false);
+    } finally {
+      tearDown();
+    }
+  }
+  
+  private static class OutputFilter implements PathFilter {
+    public boolean accept(Path path) {
+      return !(path.getName().startsWith("_"));
+    }
+  }
+}