Posted to common-commits@hadoop.apache.org by dd...@apache.org on 2009/04/30 13:15:04 UTC

svn commit: r770167 - in /hadoop/core/trunk: ./ src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/ src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/ src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/ src/mapred/org/a...

Author: ddas
Date: Thu Apr 30 11:15:03 2009
New Revision: 770167

URL: http://svn.apache.org/viewvc?rev=770167&view=rev
Log:
HADOOP-5720. Introduces new task types - JOB_SETUP, JOB_CLEANUP and TASK_CLEANUP. Removes the isMap methods from TaskID/TaskAttemptID classes. Contributed by Devaraj Das.
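
For context, a minimal usage sketch of the changed constructors (class names come from this patch; the jobtracker identifier and numbers are illustrative):

    import org.apache.hadoop.mapred.JobID;
    import org.apache.hadoop.mapred.TaskAttemptID;
    import org.apache.hadoop.mapred.TaskID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class TaskTypeMigrationSketch {
      public static void main(String[] args) {
        JobID jobId = new JobID("200904301115", 1);
        // Before this patch: new TaskID(jobId, true, 0) plus tip.isMap()
        // After this patch: the boolean flag is replaced by an explicit TaskType
        TaskID mapTip = new TaskID(jobId, TaskType.MAP, 0);
        TaskAttemptID attempt = new TaskAttemptID(mapTip, 0);
        System.out.println(attempt + " has type " + attempt.getTaskType());
      }
    }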

Added:
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskType.java
Removed:
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/TaskType.java
Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairSchedulerServlet.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/NewJobWeightBooster.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/WeightAdjuster.java
    hadoop/core/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobSubmissionProtocol.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskID.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskAttemptID.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskID.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobHistory.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobQueueTaskScheduler.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestTaskFail.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Thu Apr 30 11:15:03 2009
@@ -40,6 +40,10 @@
     HADOOP-5699. Change org.apache.hadoop.examples.PiEstimator to use 
     new mapreduce api. (Amareshwari Sriramadasu via sharad)
 
+    HADOOP-5720. Introduces new task types - JOB_SETUP, JOB_CLEANUP
+    and TASK_CLEANUP. Removes the isMap methods from TaskID/TaskAttemptID
+    classes. (ddas)
+
   NEW FEATURES
 
     HADOOP-4268. Change fsck to use ClientProtocol methods so that the

Modified: hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java (original)
+++ hadoop/core/trunk/src/contrib/capacity-scheduler/src/test/org/apache/hadoop/mapred/TestCapacityScheduler.java Thu Apr 30 11:15:03 2009
@@ -36,6 +36,7 @@
 
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.JobStatusChangeEvent.EventType;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.conf.Configuration;
 
@@ -233,8 +234,12 @@
     
     private TaskAttemptID getTaskAttemptID(boolean isMap) {
       JobID jobId = getJobID();
+      TaskType t = TaskType.REDUCE;
+      if (isMap) {
+        t = TaskType.MAP;
+      }
       return new TaskAttemptID(jobId.getJtIdentifier(),
-          jobId.getId(), isMap, (isMap)?++mapTaskCtr: ++redTaskCtr, 0);
+          jobId.getId(), t, (isMap)?++mapTaskCtr: ++redTaskCtr, 0);
     }
     
     @Override

Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairScheduler.java Thu Apr 30 11:15:03 2009
@@ -36,6 +36,7 @@
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer;
 import org.apache.hadoop.mapred.JobStatus;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /**
@@ -525,6 +526,9 @@
     PoolManager poolMgr = getPoolManager();
     for (Pool pool: poolMgr.getPools()) {
       for (final TaskType type: TaskType.values()) {
+        if (type != TaskType.MAP && type != TaskType.REDUCE) {
+          continue;
+        }
         Set<JobInProgress> jobs = new HashSet<JobInProgress>(pool.getJobs());
         int slotsLeft = poolMgr.getAllocation(pool.getName(), type);
         // Keep assigning slots until none are left
@@ -615,6 +619,9 @@
     // fair allocation, and at this point we know that we've met everyone's
     // guarantee and we've split the excess capacity fairly among jobs left.
     for (TaskType type: TaskType.values()) {
+      if (type != TaskType.MAP && type != TaskType.REDUCE) {
+        continue;
+      }
       // Select only jobs that still need this type of task
       HashSet<JobInfo> jobsLeft = new HashSet<JobInfo>();
       for (Entry<JobInProgress, JobInfo> entry: infos.entrySet()) {

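The new guard in FairScheduler is needed because TaskType now has five constants, but only map and reduce tasks occupy scheduler slots. A standalone sketch of the same filtering idiom (the println stands in for the real allocation logic):

    import org.apache.hadoop.mapreduce.TaskType;

    public class SlotTypesSketch {
      public static void main(String[] args) {
        for (TaskType type : TaskType.values()) {
          // Skip JOB_SETUP, JOB_CLEANUP and TASK_CLEANUP; they get no slot allocations.
          if (type != TaskType.MAP && type != TaskType.REDUCE) {
            continue;
          }
          System.out.println("would compute allocations for " + type);
        }
      }
    }
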
Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairSchedulerServlet.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairSchedulerServlet.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairSchedulerServlet.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/FairSchedulerServlet.java Thu Apr 30 11:15:03 2009
@@ -37,6 +37,7 @@
 import javax.servlet.http.HttpServletResponse;
 
 import org.apache.hadoop.mapred.FairScheduler.JobInfo;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.StringUtils;
 
 /**

Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/NewJobWeightBooster.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/NewJobWeightBooster.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/NewJobWeightBooster.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/NewJobWeightBooster.java Thu Apr 30 11:15:03 2009
@@ -20,6 +20,7 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
+import org.apache.hadoop.mapreduce.TaskType;
 
 /**
  * A {@link WeightAdjuster} implementation that gives a weight boost to new jobs

Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/PoolManager.java Thu Apr 30 11:15:03 2009
@@ -34,6 +34,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.w3c.dom.Document;
 import org.w3c.dom.Element;
 import org.w3c.dom.Node;

Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/WeightAdjuster.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/WeightAdjuster.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/WeightAdjuster.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/java/org/apache/hadoop/mapred/WeightAdjuster.java Thu Apr 30 11:15:03 2009
@@ -19,6 +19,7 @@
 package org.apache.hadoop.mapred;
 
 import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.mapreduce.TaskType;
 
 /**
  * A pluggable object for altering the weights of jobs in the fair scheduler,

Modified: hadoop/core/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java (original)
+++ hadoop/core/trunk/src/contrib/fairscheduler/src/test/org/apache/hadoop/mapred/TestFairScheduler.java Thu Apr 30 11:15:03 2009
@@ -33,6 +33,7 @@
 import org.apache.hadoop.io.BytesWritable;
 import org.apache.hadoop.mapred.JobStatus;
 import org.apache.hadoop.mapred.FairScheduler.JobInfo;
+import org.apache.hadoop.mapreduce.TaskType;
 
 public class TestFairScheduler extends TestCase {
   final static String TEST_DIR = new File(System.getProperty("test.build.data",
@@ -95,8 +96,12 @@
     
     private TaskAttemptID getTaskAttemptID(boolean isMap) {
       JobID jobId = getJobID();
+      TaskType t = TaskType.REDUCE;
+      if (isMap) {
+        t = TaskType.MAP;
+      }
       return new TaskAttemptID(jobId.getJtIdentifier(),
-          jobId.getId(), isMap, ++taskCounter, 0);
+          jobId.getId(), t, ++taskCounter, 0);
     }
   }
   

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/Child.java Thu Apr 30 11:15:03 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.mapred.JvmTask;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.jvm.JvmMetrics;
@@ -58,7 +59,8 @@
     final TaskAttemptID firstTaskid = TaskAttemptID.forName(args[2]);
     final int SLEEP_LONGER_COUNT = 5;
     int jvmIdInt = Integer.parseInt(args[3]);
-    JVMId jvmId = new JVMId(firstTaskid.getJobID(),firstTaskid.isMap(),jvmIdInt);
+    JVMId jvmId = new JVMId(firstTaskid.getJobID(),
+        firstTaskid.getTaskType() == TaskType.MAP,jvmIdInt);
     TaskUmbilicalProtocol umbilical =
       (TaskUmbilicalProtocol)RPC.getProxy(TaskUmbilicalProtocol.class,
           TaskUmbilicalProtocol.versionID,

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/InterTrackerProtocol.java Thu Apr 30 11:15:03 2009
@@ -61,8 +61,9 @@
    *            (HADOOP-4869) 
    * Version 24: Changed format of Task and TaskStatus for HADOOP-4759 
    * Version 25: JobIDs are passed in response to JobTracker restart 
+   * Version 26: Modified TaskID to be aware of the new TaskTypes
    */
-  public static final long versionID = 25L;
+  public static final long versionID = 26L;
   
   public final static int TRACKERS_OK = 0;
   public final static int UNKNOWN_TASKTRACKER = 1;

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java Thu Apr 30 11:15:03 2009
@@ -38,6 +38,7 @@
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.JobHistory.Values;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.metrics.MetricsContext;
 import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
@@ -2518,7 +2519,7 @@
    * Return the TaskInProgress that matches the tipid.
    */
   public synchronized TaskInProgress getTaskInProgress(TaskID tipid) {
-    if (tipid.isMap()) {
+    if (tipid.getTaskType() == TaskType.MAP) {
       if (tipid.equals(cleanup[0].getTIPId())) { // cleanup map tip
         return cleanup[0]; 
       }

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobSubmissionProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobSubmissionProtocol.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobSubmissionProtocol.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobSubmissionProtocol.java Thu Apr 30 11:15:03 2009
@@ -57,9 +57,10 @@
    *             method getClusterStatus() to take a boolean argument
    *             for HADOOP-4807
    * Version 20: Modified ClusterStatus to have the tasktracker expiry
-   *             interval for HADOOP-4939                     
+   *             interval for HADOOP-4939
+   * Version 21: Modified TaskID to be aware of the new TaskTypes                                 
    */
-  public static final long versionID = 20L;
+  public static final long versionID = 21L;
 
   /**
    * Allocate a name for the job.

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java Thu Apr 30 11:15:03 2009
@@ -34,6 +34,7 @@
 import org.apache.hadoop.mapred.JobTrackerMetricsInst;
 import org.apache.hadoop.mapred.JvmTask;
 import org.apache.hadoop.mapred.JobClient.RawSplit;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.ReflectionUtils;
 
 /** Implements MapReduce locally, in-process, for debugging. */ 
@@ -161,7 +162,8 @@
         
         for (int i = 0; i < rawSplits.length; i++) {
           if (!this.isInterrupted()) {
-            TaskAttemptID mapId = new TaskAttemptID(new TaskID(jobId, true, i),0);  
+            TaskAttemptID mapId = new TaskAttemptID(
+                new TaskID(jobId, TaskType.MAP, i),0);  
             mapIds.add(mapId);
             MapTask map = new MapTask(file.toString(),  
                                       mapId, i,
@@ -182,7 +184,7 @@
           }
         }
         TaskAttemptID reduceId = 
-          new TaskAttemptID(new TaskID(jobId, false, 0), 0);
+          new TaskAttemptID(new TaskID(jobId, TaskType.REDUCE, 0), 0);
         try {
           if (numReduceTasks > 0) {
             // move map output to reduce input  

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java Thu Apr 30 11:15:03 2009
@@ -21,6 +21,8 @@
 import java.io.DataInput;
 import java.io.IOException;
 
+import org.apache.hadoop.mapreduce.TaskType;
+
 /**
  * TaskAttemptID represents the immutable and unique identifier for 
  * a task attempt. Each task attempt is one particular instance of a Map or
@@ -57,13 +59,13 @@
    * Constructs a TaskId object from given parts.
    * @param jtIdentifier jobTracker identifier
    * @param jobId job number 
-   * @param isMap whether the tip is a map 
+   * @param type the TaskType 
    * @param taskId taskId number
    * @param id the task attempt number
    */
-  public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, 
+  public TaskAttemptID(String jtIdentifier, int jobId, TaskType type, 
                        int taskId, int id) {
-    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id);
+    this(new TaskID(jtIdentifier, jobId, type, taskId), id);
   }
   
   public TaskAttemptID() { 
@@ -116,32 +118,32 @@
    * of <i>any jobtracker</i>, in <i>any job</i>, of the <i>first 
    * map task</i>, we would use :
    * <pre> 
-   * TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
+   * TaskAttemptID.getTaskAttemptIDsPattern(null, null, TaskType.MAP, 1, null);
    * </pre>
    * which will return :
    * <pre> "attempt_[^_]*_[0-9]*_m_000001_[0-9]*" </pre> 
    * @param jtIdentifier jobTracker identifier, or null
    * @param jobId job number, or null
-   * @param isMap whether the tip is a map, or null 
+   * @param type the {@link TaskType} 
    * @param taskId taskId number, or null
    * @param attemptId the task attempt number, or null
    * @return a regex pattern matching TaskAttemptIDs
    */
   @Deprecated
   public static String getTaskAttemptIDsPattern(String jtIdentifier,
-      Integer jobId, Boolean isMap, Integer taskId, Integer attemptId) {
+      Integer jobId, TaskType type, Integer taskId, Integer attemptId) {
     StringBuilder builder = new StringBuilder(ATTEMPT).append(SEPARATOR);
     builder.append(getTaskAttemptIDsPatternWOPrefix(jtIdentifier, jobId,
-        isMap, taskId, attemptId));
+        type, taskId, attemptId));
     return builder.toString();
   }
   
   @Deprecated
   static StringBuilder getTaskAttemptIDsPatternWOPrefix(String jtIdentifier
-      , Integer jobId, Boolean isMap, Integer taskId, Integer attemptId) {
+      , Integer jobId, TaskType type, Integer taskId, Integer attemptId) {
     StringBuilder builder = new StringBuilder();
     builder.append(TaskID.getTaskIDsPatternWOPrefix(jtIdentifier
-        , jobId, isMap, taskId))
+        , jobId, type, taskId))
         .append(SEPARATOR)
         .append(attemptId != null ? attemptId : "[0-9]*");
     return builder;

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskID.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskID.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskID.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskID.java Thu Apr 30 11:15:03 2009
@@ -21,6 +21,8 @@
 import java.io.DataInput;
 import java.io.IOException;
 
+import org.apache.hadoop.mapreduce.TaskType;
+
 /**
  * TaskID represents the immutable and unique identifier for 
  * a Map or Reduce Task. Each TaskID encompasses multiple attempts made to
@@ -49,26 +51,26 @@
   /**
    * Constructs a TaskID object from given {@link JobID}.  
    * @param jobId JobID that this tip belongs to 
-   * @param isMap whether the tip is a map 
+   * @param type the {@link TaskType} 
    * @param id the tip number
    */
-  public TaskID(org.apache.hadoop.mapreduce.JobID jobId, boolean isMap,int id) {
-    super(jobId, isMap, id);
+  public TaskID(org.apache.hadoop.mapreduce.JobID jobId, TaskType type,int id) {
+    super(jobId, type, id);
   }
   
   /**
    * Constructs a TaskInProgressId object from given parts.
    * @param jtIdentifier jobTracker identifier
    * @param jobId job number 
-   * @param isMap whether the tip is a map 
+   * @param type the {@link TaskType} 
    * @param id the tip number
    */
-  public TaskID(String jtIdentifier, int jobId, boolean isMap, int id) {
-    this(new JobID(jtIdentifier, jobId), isMap, id);
+  public TaskID(String jtIdentifier, int jobId, TaskType type, int id) {
+    this(new JobID(jtIdentifier, jobId), type, id);
   }
   
   public TaskID() {
-    super(new JobID(), false, 0);
+    super(new JobID(), TaskType.REDUCE, 0);
   }
   
   /**
@@ -80,7 +82,7 @@
     if (old instanceof TaskID) {
       return (TaskID) old;
     } else {
-      return new TaskID(JobID.downgrade(old.getJobID()), old.isMap(), 
+      return new TaskID(JobID.downgrade(old.getJobID()), old.getTaskType(), 
                         old.getId());
     }
   }
@@ -108,25 +110,28 @@
    * <pre> "task_[^_]*_[0-9]*_m_000001*" </pre> 
    * @param jtIdentifier jobTracker identifier, or null
    * @param jobId job number, or null
-   * @param isMap whether the tip is a map, or null 
+   * @param type the {@link TaskType}, or null 
    * @param taskId taskId number, or null
    * @return a regex pattern matching TaskIDs
    */
   @Deprecated
   public static String getTaskIDsPattern(String jtIdentifier, Integer jobId
-      , Boolean isMap, Integer taskId) {
+      , TaskType type, Integer taskId) {
     StringBuilder builder = new StringBuilder(TASK).append(SEPARATOR)
-      .append(getTaskIDsPatternWOPrefix(jtIdentifier, jobId, isMap, taskId));
+      .append(getTaskIDsPatternWOPrefix(jtIdentifier, jobId, type, taskId));
     return builder.toString();
   }
   
   @Deprecated
   static StringBuilder getTaskIDsPatternWOPrefix(String jtIdentifier
-      , Integer jobId, Boolean isMap, Integer taskId) {
+      , Integer jobId, TaskType type, Integer taskId) {
     StringBuilder builder = new StringBuilder();
     builder.append(JobID.getJobIDsPatternWOPrefix(jtIdentifier, jobId))
       .append(SEPARATOR)
-      .append(isMap != null ? (isMap ? "m" : "r") : "(m|r)").append(SEPARATOR)
+      .append(type != null ? 
+          (org.apache.hadoop.mapreduce.TaskID.getRepresentingCharacter(type)) : 
+            org.apache.hadoop.mapreduce.TaskID.getAllTaskTypes()).
+            append(SEPARATOR)
       .append(taskId != null ? idFormat.format(taskId) : "[0-9]*");
     return builder;
   }

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java Thu Apr 30 11:15:03 2009
@@ -32,6 +32,7 @@
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapred.JobClient.RawSplit;
 import org.apache.hadoop.mapred.SortedRanges.Range;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.net.Node;
 
 
@@ -219,7 +220,8 @@
    */
   void init(JobID jobId) {
     this.startTime = System.currentTimeMillis();
-    this.id = new TaskID(jobId, isMapTask(), partition);
+    this.id = new TaskID(jobId, isMapTask() ? TaskType.MAP : TaskType.REDUCE,
+        partition);
     this.skipping = startSkipping();
   }
 

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskUmbilicalProtocol.java Thu Apr 30 11:15:03 2009
@@ -54,9 +54,10 @@
    * Version 14 changed the getTask method signature for HADOOP-4232
    * Version 15 Adds FAILED_UNCLEAN and KILLED_UNCLEAN states for HADOOP-4759
    * Version 16 Change in signature of getTask() for HADOOP-5488
+   * Version 17 Modified TaskID to be aware of the new TaskTypes
    * */
 
-  public static final long versionID = 16L;
+  public static final long versionID = 17L;
   
   /**
    * Called when a child task process starts, to get its task.

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskAttemptID.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskAttemptID.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskAttemptID.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskAttemptID.java Thu Apr 30 11:15:03 2009
@@ -63,13 +63,13 @@
    * Constructs a TaskId object from given parts.
    * @param jtIdentifier jobTracker identifier
    * @param jobId job number 
-   * @param isMap whether the tip is a map 
+   * @param type the TaskType 
    * @param taskId taskId number
    * @param id the task attempt number
    */
-  public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, 
+  public TaskAttemptID(String jtIdentifier, int jobId, TaskType type, 
                        int taskId, int id) {
-    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id);
+    this(new TaskID(jtIdentifier, jobId, type, taskId), id);
   }
   
   public TaskAttemptID() { 
@@ -85,12 +85,11 @@
   public TaskID getTaskID() {
     return taskId;
   }
-  
-  /**Returns whether this TaskAttemptID is a map ID */
-  public boolean isMap() {
-    return taskId.isMap();
+    
+  /**Returns the TaskType of the TaskAttemptID */
+  public TaskType getTaskType() {
+    return taskId.getTaskType();
   }
-  
   @Override
   public boolean equals(Object o) {
     if (!super.equals(o))
@@ -153,15 +152,15 @@
       String[] parts = str.split(Character.toString(SEPARATOR));
       if(parts.length == 6) {
         if(parts[0].equals(ATTEMPT)) {
-          boolean isMap = false;
-          if(parts[3].equals("m")) isMap = true;
-          else if(parts[3].equals("r")) isMap = false;
-          else throw new Exception();
-          return new org.apache.hadoop.mapred.TaskAttemptID
-                       (parts[1],
-                        Integer.parseInt(parts[2]),
-                        isMap, Integer.parseInt(parts[4]), 
-                        Integer.parseInt(parts[5]));
+          String type = parts[3];
+          TaskType t = TaskID.getTaskType(type.charAt(0));
+          if(t != null) {
+            return new org.apache.hadoop.mapred.TaskAttemptID
+            (parts[1],
+             Integer.parseInt(parts[2]),
+             t, Integer.parseInt(parts[4]), 
+             Integer.parseInt(parts[5]));  
+          } else throw new Exception();
         }
       }
     } catch (Exception ex) {

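A small sketch of the round trip through the reworked forName parsing (the ID string below is made up for illustration):

    import org.apache.hadoop.mapreduce.TaskAttemptID;

    public class ForNameSketch {
      public static void main(String[] args) {
        // The type field of the ID string is now one of m/r/s/c/t rather than just m/r.
        TaskAttemptID attempt =
            TaskAttemptID.forName("attempt_200904301115_0001_m_000001_0");
        System.out.println(attempt.getTaskType()); // MAP
        System.out.println(attempt.getTaskID());   // task_200904301115_0001_m_000001
      }
    }
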
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskID.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskID.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskID.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskID.java Thu Apr 30 11:15:03 2009
@@ -22,6 +22,12 @@
 import java.io.DataOutput;
 import java.io.IOException;
 import java.text.NumberFormat;
+import java.util.EnumMap;
+import java.util.HashMap;
+import java.util.Map;
+
+import org.apache.hadoop.io.WritableUtils;
+
 
 /**
  * TaskID represents the immutable and unique identifier for 
@@ -54,32 +60,32 @@
   }
   
   private JobID jobId;
-  private boolean isMap;
-
+  private TaskType type;
+  
   /**
    * Constructs a TaskID object from given {@link JobID}.  
    * @param jobId JobID that this tip belongs to 
-   * @param isMap whether the tip is a map 
+   * @param type the {@link TaskType} of the task 
    * @param id the tip number
    */
-  public TaskID(JobID jobId, boolean isMap, int id) {
+  public TaskID(JobID jobId, TaskType type, int id) {
     super(id);
     if(jobId == null) {
       throw new IllegalArgumentException("jobId cannot be null");
     }
     this.jobId = jobId;
-    this.isMap = isMap;
+    this.type = type;
   }
   
   /**
    * Constructs a TaskInProgressId object from given parts.
    * @param jtIdentifier jobTracker identifier
    * @param jobId job number 
-   * @param isMap whether the tip is a map 
+   * @param type the TaskType 
    * @param id the tip number
    */
-  public TaskID(String jtIdentifier, int jobId, boolean isMap, int id) {
-    this(new JobID(jtIdentifier, jobId), isMap, id);
+  public TaskID(String jtIdentifier, int jobId, TaskType type, int id) {
+    this(new JobID(jtIdentifier, jobId), type, id);
   }
   
   public TaskID() { 
@@ -90,10 +96,12 @@
   public JobID getJobID() {
     return jobId;
   }
-  
-  /**Returns whether this TaskID is a map ID */
-  public boolean isMap() {
-    return isMap;
+    
+  /**
+   * Get the type of the task
+   */
+  public TaskType getTaskType() {
+    return type;
   }
   
   @Override
@@ -102,7 +110,7 @@
       return false;
 
     TaskID that = (TaskID)o;
-    return this.isMap == that.isMap && this.jobId.equals(that.jobId);
+    return this.type == that.type && this.jobId.equals(that.jobId);
   }
 
   /**Compare TaskInProgressIds by first jobIds, then by tip numbers. Reduces are 
@@ -112,10 +120,12 @@
     TaskID that = (TaskID)o;
     int jobComp = this.jobId.compareTo(that.jobId);
     if(jobComp == 0) {
-      if(this.isMap == that.isMap) {
+      if(this.type == that.type) {
         return this.id - that.id;
       }
-      else return this.isMap ? -1 : 1;
+      else {
+        return this.type.compareTo(that.type);
+      }
     }
     else return jobComp;
   }
@@ -132,7 +142,7 @@
   protected StringBuilder appendTo(StringBuilder builder) {
     return jobId.appendTo(builder).
                  append(SEPARATOR).
-                 append(isMap ? 'm' : 'r').
+                 append(CharTaskTypeMaps.getRepresentingCharacter(type)).
                  append(SEPARATOR).
                  append(idFormat.format(id));
   }
@@ -146,14 +156,14 @@
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);
     jobId.readFields(in);
-    isMap = in.readBoolean();
+    type = WritableUtils.readEnum(in, TaskType.class);
   }
 
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
     jobId.write(out);
-    out.writeBoolean(isMap);
+    WritableUtils.writeEnum(out, type);
   }
   
   /** Construct a TaskID object from given string 
@@ -168,14 +178,15 @@
       String[] parts = str.split("_");
       if(parts.length == 5) {
         if(parts[0].equals(TASK)) {
-          boolean isMap = false;
-          if(parts[3].equals("m")) isMap = true;
-          else if(parts[3].equals("r")) isMap = false;
-          else throw new Exception();
-          return new org.apache.hadoop.mapred.TaskID(parts[1], 
+          String type = parts[3];
+          TaskType t = CharTaskTypeMaps.getTaskType(type.charAt(0));
+          if(t != null) {
+          
+            return new org.apache.hadoop.mapred.TaskID(parts[1], 
                                                      Integer.parseInt(parts[2]),
-                                                     isMap, 
+                                                     t, 
                                                      Integer.parseInt(parts[4]));
+          } else throw new Exception();
         }
       }
     }catch (Exception ex) {//fall below
@@ -183,5 +194,64 @@
     throw new IllegalArgumentException("TaskId string : " + str 
         + " is not properly formed");
   }
+  /**
+   * Gets the character representing the {@link TaskType}
+   * @param type the TaskType
+   * @return the character
+   */
+  public static char getRepresentingCharacter(TaskType type) {
+    return CharTaskTypeMaps.getRepresentingCharacter(type);
+  }
+  /**
+   * Gets the {@link TaskType} corresponding to the character
+   * @param c the character
+   * @return the TaskType
+   */
+  public static TaskType getTaskType(char c) {
+    return CharTaskTypeMaps.getTaskType(c);
+  }
   
+  public static String getAllTaskTypes() {
+    return CharTaskTypeMaps.allTaskTypes;
+  }
+
+  /**
+   * Maintains the mapping from the character representation of a task type to 
+   * the enum class TaskType constants
+   */
+  static class CharTaskTypeMaps {
+    private static EnumMap<TaskType, Character> typeToCharMap = 
+      new EnumMap<TaskType,Character>(TaskType.class);
+    private static Map<Character, TaskType> charToTypeMap = 
+      new HashMap<Character, TaskType>();
+    static String allTaskTypes = "(m|r|s|c|t)";
+    static {
+      setupTaskTypeToCharMapping();
+      setupCharToTaskTypeMapping();
+    }
+    
+    private static void setupTaskTypeToCharMapping() {
+      typeToCharMap.put(TaskType.MAP, 'm');
+      typeToCharMap.put(TaskType.REDUCE, 'r');
+      typeToCharMap.put(TaskType.JOB_SETUP, 's');
+      typeToCharMap.put(TaskType.JOB_CLEANUP, 'c');
+      typeToCharMap.put(TaskType.TASK_CLEANUP, 't');
+    }
+
+    private static void setupCharToTaskTypeMapping() {
+      charToTypeMap.put('m', TaskType.MAP);
+      charToTypeMap.put('r', TaskType.REDUCE);
+      charToTypeMap.put('s', TaskType.JOB_SETUP);
+      charToTypeMap.put('c', TaskType.JOB_CLEANUP);
+      charToTypeMap.put('t', TaskType.TASK_CLEANUP);
+    }
+
+    static char getRepresentingCharacter(TaskType type) {
+      return typeToCharMap.get(type);
+    }
+    static TaskType getTaskType(char c) {
+      return charToTypeMap.get(c);
+    }
+  }
+
 }

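A short sketch exercising the new static character-mapping helpers defined above (expected output follows the CharTaskTypeMaps tables):

    import org.apache.hadoop.mapreduce.TaskID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class TaskTypeCharSketch {
      public static void main(String[] args) {
        // MAP -> m, REDUCE -> r, JOB_SETUP -> s, JOB_CLEANUP -> c, TASK_CLEANUP -> t
        for (TaskType type : TaskType.values()) {
          System.out.println(type + " -> " + TaskID.getRepresentingCharacter(type));
        }
        System.out.println(TaskID.getTaskType('c'));  // JOB_CLEANUP
        System.out.println(TaskID.getAllTaskTypes()); // (m|r|s|c|t)
      }
    }
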
Added: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskType.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskType.java?rev=770167&view=auto
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskType.java (added)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/TaskType.java Thu Apr 30 11:15:03 2009
@@ -0,0 +1,26 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce;
+
+/**
+ * Enum for map, reduce, job-setup, job-cleanup, task-cleanup task types.
+ */
+public enum TaskType {
+  MAP, REDUCE, JOB_SETUP, JOB_CLEANUP, TASK_CLEANUP
+}

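A minimal sketch showing how the new enum constants surface in serialized task IDs (the jobtracker identifier is illustrative):

    import org.apache.hadoop.mapreduce.JobID;
    import org.apache.hadoop.mapreduce.TaskID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class NewTaskTypesSketch {
      public static void main(String[] args) {
        JobID jobId = new JobID("200904301115", 1);
        // Setup and cleanup tasks show up as 's', 'c' and 't' in the ID strings.
        System.out.println(new TaskID(jobId, TaskType.JOB_SETUP, 0));    // task_..._s_000000
        System.out.println(new TaskID(jobId, TaskType.JOB_CLEANUP, 0));  // task_..._c_000000
        System.out.println(new TaskID(jobId, TaskType.TASK_CLEANUP, 0)); // task_..._t_000000
      }
    }
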
Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapreduce/lib/output/FileOutputFormat.java Thu Apr 30 11:15:03 2009
@@ -224,7 +224,7 @@
    * @param context the task that is calling this
    * @param name the base filename
    * @param extension the filename extension
-   * @return a string like $name-[mr]-$id$extension
+   * @return a string like $name-[mrsct]-$id$extension
    */
   public synchronized static String getUniqueFile(TaskAttemptContext context,
                                                   String name,
@@ -234,7 +234,8 @@
     StringBuilder result = new StringBuilder();
     result.append(name);
     result.append('-');
-    result.append(taskId.isMap() ? 'm' : 'r');
+    result.append(
+        TaskID.getRepresentingCharacter(taskId.getTaskType()));
     result.append('-');
     result.append(NUMBER_FORMAT.format(partition));
     result.append(extension);

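Output file names keep the same shape but can now carry any of the five type characters. A standalone sketch that mirrors the $name-[mrsct]-$id$extension pattern without needing a TaskAttemptContext (the 5-digit partition formatting is assumed to match FileOutputFormat's NUMBER_FORMAT):

    import java.text.NumberFormat;

    import org.apache.hadoop.mapreduce.TaskID;
    import org.apache.hadoop.mapreduce.TaskType;

    public class UniqueFileNameSketch {
      public static void main(String[] args) {
        NumberFormat fmt = NumberFormat.getInstance();
        fmt.setMinimumIntegerDigits(5);
        fmt.setGroupingUsed(false);
        // A map output and a reduce output for partition 0 of a job.
        for (TaskType type : new TaskType[] {TaskType.MAP, TaskType.REDUCE}) {
          System.out.println("part-" + TaskID.getRepresentingCharacter(type)
              + "-" + fmt.format(0));   // part-m-00000, part-r-00000
        }
      }
    }
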
Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java Thu Apr 30 11:15:03 2009
@@ -26,6 +26,7 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.net.DNSToSwitchMapping;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.net.NetworkTopology;
@@ -510,7 +511,7 @@
   throws IOException {
     String jtId = jobTracker.getJobTracker().getTrackerIdentifier();
     TaskAttemptID dummy = 
-      new TaskAttemptID(jtId, jobId.getId(), false, 0, 0);
+      new TaskAttemptID(jtId, jobId.getId(), TaskType.REDUCE, 0, 0);
     return taskTrackerList.get(index).getTaskTracker()
                                      .getMapCompletionEvents(jobId, 0, max, 
                                                              dummy);

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobHistory.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobHistory.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobHistory.java Thu Apr 30 11:15:03 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.mapred.JobHistory.*;
+import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 
@@ -575,8 +576,8 @@
 
     // Get the 1st map, 1st reduce, cleanup & setup taskIDs and
     // validate their history info
-    TaskID mapTaskId = new TaskID(job.getID(), true, 0);
-    TaskID reduceTaskId = new TaskID(job.getID(), false, 0);
+    TaskID mapTaskId = new TaskID(job.getID(), TaskType.MAP, 0);
+    TaskID reduceTaskId = new TaskID(job.getID(), TaskType.REDUCE, 0);
 
     TaskInProgress cleanups[] = jip.getCleanupTasks();
     TaskID cleanupTaskId;

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobQueueTaskScheduler.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobQueueTaskScheduler.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobQueueTaskScheduler.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestJobQueueTaskScheduler.java Thu Apr 30 11:15:03 2009
@@ -27,6 +27,7 @@
 import junit.framework.TestCase;
 
 import org.apache.hadoop.io.BytesWritable;
+import org.apache.hadoop.mapreduce.TaskType;
 
 public class TestJobQueueTaskScheduler extends TestCase {
   
@@ -74,7 +75,7 @@
     @Override
     public Task obtainNewMapTask(final TaskTrackerStatus tts, int clusterSize,
         int ignored) throws IOException {
-      TaskAttemptID attemptId = getTaskAttemptID(true);
+      TaskAttemptID attemptId = getTaskAttemptID(TaskType.MAP);
       Task task = new MapTask("", attemptId, 0, "", new BytesWritable()) {
         @Override
         public String toString() {
@@ -89,7 +90,7 @@
     @Override
     public Task obtainNewReduceTask(final TaskTrackerStatus tts,
         int clusterSize, int ignored) throws IOException {
-      TaskAttemptID attemptId = getTaskAttemptID(false);
+      TaskAttemptID attemptId = getTaskAttemptID(TaskType.REDUCE);
       Task task = new ReduceTask("", attemptId, 0, 10) {
         @Override
         public String toString() {
@@ -101,10 +102,10 @@
       return task;
     }
     
-    private TaskAttemptID getTaskAttemptID(boolean isMap) {
+    private TaskAttemptID getTaskAttemptID(TaskType type) {
       JobID jobId = getJobID();
       return new TaskAttemptID(jobId.getJtIdentifier(),
-          jobId.getId(), isMap, ++taskCounter, 0);
+          jobId.getId(), type, ++taskCounter, 0);
     }
   }
   

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java Thu Apr 30 11:15:03 2009
@@ -35,6 +35,7 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.TaskType;
 
 /**
  * Class to test mapred debug Script
@@ -166,7 +167,8 @@
 
     JobID jobId = job.getID();
     // construct the task id of first map task of failmap
-    TaskAttemptID taskId = new TaskAttemptID(new TaskID(jobId,true, 0), 0);
+    TaskAttemptID taskId = new TaskAttemptID(
+        new TaskID(jobId,TaskType.MAP, 0), 0);
     // wait for the job to finish.
     while (!job.isComplete()) ;
     

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java Thu Apr 30 11:15:03 2009
@@ -39,6 +39,7 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.IntWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapreduce.TaskType;
 
 /**
  * A JUnit test to test Mini Map-Reduce Cluster with Mini-DFS.
@@ -185,7 +186,7 @@
     // Run a word count example
     // Keeping tasks that match this pattern
     String pattern = 
-      TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
+      TaskAttemptID.getTaskAttemptIDsPattern(null, null, TaskType.MAP, 1, null);
     jobConf.setKeepTaskFilesPattern(pattern);
     TestResult result;
     final Path inDir = new Path("./wc/input");
@@ -195,7 +196,8 @@
     assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
                  "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result.output);
     JobID jobid = result.job.getID();
-    TaskAttemptID taskid = new TaskAttemptID(new TaskID(jobid, true, 1),0);
+    TaskAttemptID taskid = new TaskAttemptID(
+        new TaskID(jobid, TaskType.MAP, 1),0);
     checkTaskDirectories(mr, new String[]{jobid.toString()}, 
                          new String[]{taskid.toString()});
     // test with maps=0

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestTaskFail.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestTaskFail.java?rev=770167&r1=770166&r2=770167&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestTaskFail.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestTaskFail.java Thu Apr 30 11:15:03 2009
@@ -30,6 +30,7 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityReducer;
+import org.apache.hadoop.mapreduce.TaskType;
 
 public class TestTaskFail extends TestCase {
   private static String taskLog = "Task attempt log";
@@ -112,7 +113,7 @@
     JobID jobId = job.getID();
     // construct the task id of first map task
     TaskAttemptID attemptId = 
-      new TaskAttemptID(new TaskID(jobId, true, 0), 0);
+      new TaskAttemptID(new TaskID(jobId, TaskType.MAP, 0), 0);
     TaskInProgress tip = mr.getJobTrackerRunner().getJobTracker().
                             getTip(attemptId.getTaskID());
     // this should not be cleanup attempt since the first attempt 
@@ -129,7 +130,7 @@
     assertTrue(log.contains(taskLog));
     assertTrue(log.contains(cleanupLog));
     
-    attemptId =  new TaskAttemptID(new TaskID(jobId, true, 0), 1);
+    attemptId =  new TaskAttemptID(new TaskID(jobId, TaskType.MAP, 0), 1);
     // this should be cleanup attempt since the second attempt fails
     // with System.exit
     assertTrue(tip.isCleanupAttempt(attemptId));