Posted to common-commits@hadoop.apache.org by om...@apache.org on 2008/10/06 07:38:50 UTC

svn commit: r701948 - in /hadoop/core/trunk: ./ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/

Author: omalley
Date: Sun Oct  5 22:38:50 2008
New Revision: 701948

URL: http://svn.apache.org/viewvc?rev=701948&view=rev
Log:
HADOOP-4209. Remove the change to the format of task attempt id by 
incrementing the task attempt numbers by 1000 when the job restarts.
(Amar Kamat via omalley)
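
With this change, attempt ids stay unique across jobtracker restarts through the attempt number alone rather than a timestamp suffix. A minimal sketch of that arithmetic, assuming the NUM_ATTEMPTS_PER_RESTART constant and numRestarts() accessor introduced below; the standalone class and method here are hypothetical, for illustration only:

// Hypothetical sketch of the numbering added to TaskInProgress in this patch;
// not part of Hadoop itself.
public class RestartAttemptNumbering {
  // Same value as the NUM_ATTEMPTS_PER_RESTART constant in the patch.
  private static final int NUM_ATTEMPTS_PER_RESTART = 1000;

  // Attempts scheduled by different jobtracker incarnations cannot collide
  // as long as one incarnation creates fewer than 1000 attempts per task.
  static int attemptNumber(int restartCount, int nextTaskId) {
    return restartCount * NUM_ATTEMPTS_PER_RESTART + nextTaskId;
  }

  public static void main(String[] args) {
    System.out.println(attemptNumber(0, 0));  // 0    -> attempt_..._m_000005_0
    System.out.println(attemptNumber(0, 1));  // 1    -> attempt_..._m_000005_1
    System.out.println(attemptNumber(1, 0));  // 1000 -> attempt_..._m_000005_1000 after one restart
  }
}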

Modified:
    hadoop/core/trunk/CHANGES.txt
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobHistory.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobTracker.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java
    hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java
    hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java

Modified: hadoop/core/trunk/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/CHANGES.txt?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/CHANGES.txt (original)
+++ hadoop/core/trunk/CHANGES.txt Sun Oct  5 22:38:50 2008
@@ -474,6 +474,10 @@
     HADOOP-4279. Trace the seeds of random sequences in append unit tests to
     make itermitant failures reproducible. (szetszwo via cdouglas)
 
+    HADOOP-4209. Remove the change to the format of task attempt id by 
+    incrementing the task attempt numbers by 1000 when the job restarts.
+    (Amar Kamat via omalley)
+
   OPTIMIZATIONS
 
     HADOOP-3556. Removed lock contention in MD5Hash by changing the 

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobHistory.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobHistory.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobHistory.java Sun Oct  5 22:38:50 2008
@@ -114,7 +114,7 @@
     FINISHED_MAPS, FINISHED_REDUCES, JOB_STATUS, TASKID, HOSTNAME, TASK_TYPE, 
     ERROR, TASK_ATTEMPT_ID, TASK_STATUS, COPY_PHASE, SORT_PHASE, REDUCE_PHASE, 
     SHUFFLE_FINISHED, SORT_FINISHED, COUNTERS, SPLITS, JOB_PRIORITY, HTTP_PORT, 
-    TRACKER_NAME, STATE_STRING, VERSION
+    TRACKER_NAME, STATE_STRING, VERSION, RESTART_COUNT
   }
 
   /**
@@ -1063,9 +1063,10 @@
      * @param jobid job id
      * @param submitTime job's submit time
      * @param launchTime job's launch time
+     * @param restartCount number of times the job got restarted
      */
-    public static void logJobSubmitTime(JobID jobid, long submitTime, 
-                                        long launchTime){
+    public static void logJobInfo(JobID jobid, long submitTime, long launchTime,
+                                  int restartCount){
       if (!disableHistory){
         String logFileKey =  JOBTRACKER_UNIQUE_STRING + jobid; 
         ArrayList<PrintWriter> writer = openJobs.get(logFileKey); 
@@ -1073,10 +1074,11 @@
         if (null != writer){
           JobHistory.log(writer, RecordTypes.Job,
                          new Keys[] {Keys.JOBID, Keys.SUBMIT_TIME, 
-                                     Keys.LAUNCH_TIME},
+                                     Keys.LAUNCH_TIME, Keys.RESTART_COUNT},
                          new String[] {jobid.toString(), 
                                        String.valueOf(submitTime), 
-                                       String.valueOf(launchTime)});
+                                       String.valueOf(launchTime),
+                                       String.valueOf(restartCount)});
         }
       }
     }

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobInProgress.java Sun Oct  5 22:38:50 2008
@@ -134,6 +134,9 @@
   long startTime;
   long launchTime;
   long finishTime;
+  
+  // Indicates how many times the job got restarted
+  private int restartCount = 0;
 
   private JobConf conf;
   AtomicBoolean tasksInited = new AtomicBoolean(false);
@@ -493,13 +496,22 @@
   }
   
   // Update the job start/launch time (upon restart) and log to history
-  synchronized void updateJobTime(long startTime, long launchTime) {
+  synchronized void updateJobInfo(long startTime, long launchTime, int count) {
     // log and change to the job's start/launch time
     this.startTime = startTime;
     this.launchTime = launchTime;
-    JobHistory.JobInfo.logJobSubmitTime(jobId, startTime, launchTime);
+    // change to the job's restart count
+    this.restartCount = count;
+    JobHistory.JobInfo.logJobInfo(jobId, startTime, launchTime, count);
   }
 
+  /**
+   * Get the number of times the job has restarted
+   */
+  int numRestarts() {
+    return restartCount;
+  }
+  
   long getInputLength() {
     return inputLength;
   }

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobTracker.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/JobTracker.java Sun Oct  5 22:38:50 2008
@@ -615,8 +615,10 @@
     
     private void updateJob(JobInProgress jip, JobHistory.JobInfo job) {
       // Set the start/launch time only if there are recovered tasks
-      jip.updateJobTime(job.getLong(JobHistory.Keys.SUBMIT_TIME), 
-                        job.getLong(JobHistory.Keys.LAUNCH_TIME));
+      // Increment the job's restart count
+      jip.updateJobInfo(job.getLong(JobHistory.Keys.SUBMIT_TIME), 
+                        job.getLong(JobHistory.Keys.LAUNCH_TIME),
+                        job.getInt(Keys.RESTART_COUNT) + 1);
       
       // Change the job priority
       String jobpriority = job.get(Keys.JOB_PRIORITY);

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/LocalJobRunner.java Sun Oct  5 22:38:50 2008
@@ -105,7 +105,6 @@
         InputSplit[] splits;
         splits = job.getInputFormat().getSplits(job, 1);
         JobID jobId = profile.getJobID();
-        long timstamp = System.currentTimeMillis();
         
         int numReduceTasks = job.getNumReduceTasks();
         if (numReduceTasks > 1 || numReduceTasks < 0) {
@@ -118,8 +117,7 @@
         
         DataOutputBuffer buffer = new DataOutputBuffer();
         for (int i = 0; i < splits.length; i++) {
-          TaskAttemptID mapId = new TaskAttemptID(new TaskID(jobId, true, i), 
-                                                  0, timstamp);  
+          TaskAttemptID mapId = new TaskAttemptID(new TaskID(jobId, true, i),0);  
           mapIds.add(mapId);
           buffer.reset();
           splits[i].write(buffer);
@@ -140,8 +138,8 @@
           map_tasks -= 1;
           updateCounters(map);
         }
-        TaskAttemptID reduceId = new TaskAttemptID(new TaskID(jobId, false, 0),
-                                                   0, timstamp);
+        TaskAttemptID reduceId = 
+          new TaskAttemptID(new TaskID(jobId, false, 0), 0);
         try {
           if (numReduceTasks > 0) {
             // move map output to reduce input  

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskAttemptID.java Sun Oct  5 22:38:50 2008
@@ -22,28 +22,18 @@
 import java.io.DataOutput;
 import java.io.IOException;
 
-import org.apache.hadoop.io.WritableUtils;
-
 /**
  * TaskAttemptID represents the immutable and unique identifier for 
  * a task attempt. Each task attempt is one particular instance of a Map or
  * Reduce Task identified by its TaskID. 
  * 
- * TaskAttemptID consists of 3 parts. First part is the 
+ * TaskAttemptID consists of 2 parts. First part is the 
  * {@link TaskID}, that this TaskAttemptID belongs to.
- * Second part is the task attempt number. Third part is the unique identifier
- * for distinguishing tasks-attempts across jobtracker restarts.<br> 
+ * Second part is the task attempt number. <br> 
  * An example TaskAttemptID is : 
- * <code>attempt_200707121733_0003_m_000005_0_1234567890123</code> , which 
- * represents the zeroth task attempt for the fifth map task in the third job 
- * running at the jobtracker started at <code>200707121733</code> with 
- * timestamp <code>1234567890123</code>. There could be another attempt with id
- * <code>attempt_200707121733_0003_m_000005_0_1234567890124</code> which 
- * indicates that the task was scheduled by the jobtracker started at timestamp
- * <code>1234567890124</code>. <code>200707121733</code> here indicates that 
- * the job was started by the jobtracker that was started at 
- * <code>200707121733</code>, although this task-attempt was scheduled by the 
- * new jobtracker. 
+ * <code>attempt_200707121733_0003_m_000005_0</code> , which represents the
+ * zeroth task attempt for the fifth map task in the third job 
+ * running at the jobtracker started at <code>200707121733</code>.
  * <p>
  * Applications should never construct or parse TaskAttemptID strings
  * , but rather use appropriate constructors or {@link #forName(String)} 
@@ -55,39 +45,19 @@
 public class TaskAttemptID extends ID {
   private static final String ATTEMPT = "attempt";
   private TaskID taskId;
-  private long jtTimestamp = 0;
   private static final char UNDERSCORE = '_';
   
   /**
-   * @deprecated Use {@link #TaskAttemptID(TaskID, int, long)} instead.
-   */
-  public TaskAttemptID(TaskID taskId, int id) {
-    this(taskId, id, 0);
-  }
-  
-  /**
    * Constructs a TaskAttemptID object from given {@link TaskID}.  
    * @param taskId TaskID that this task belongs to  
    * @param id the task attempt number
-   * @param jtTimestamp timestamp that uniquely identifies the task 
-   *        attempt across restarts
    */
-  public TaskAttemptID(TaskID taskId, int id, long jtTimestamp) {
+  public TaskAttemptID(TaskID taskId, int id) {
     super(id);
     if(taskId == null) {
       throw new IllegalArgumentException("taskId cannot be null");
     }
     this.taskId = taskId;
-    this.jtTimestamp = jtTimestamp;
-  }
-  
-  /**
-   * @deprecated 
-   *   Use {@link #TaskAttemptID(String, int, boolean, int, int, long)} instead
-   */
-  public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, 
-                       int taskId, int id) {
-    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id, 0);
   }
   
   /**
@@ -97,13 +67,10 @@
    * @param isMap whether the tip is a map 
    * @param taskId taskId number
    * @param id the task attempt number
-   * @param jtTimestamp timestamp that uniquely identifies the task attempt 
-   *        across restarts
    */
   public TaskAttemptID(String jtIdentifier, int jobId, boolean isMap, 
-                       int taskId, int id, long jtTimestamp) {
-    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id, 
-                    jtTimestamp);
+                       int taskId, int id) {
+    this(new TaskID(jtIdentifier, jobId, isMap, taskId), id);
   }
   
   private TaskAttemptID() { }
@@ -127,10 +94,12 @@
   public boolean equals(Object o) {
     if (!super.equals(o))
       return false;
-
-    TaskAttemptID that = (TaskAttemptID)o;
-    return this.taskId.equals(that.taskId) && 
-           this.jtTimestamp == that.jtTimestamp;
+    if(o.getClass().equals(TaskAttemptID.class)) {
+      TaskAttemptID that = (TaskAttemptID)o;
+      return this.id==that.id
+             && this.taskId.equals(that.taskId);
+    }
+    else return false;
   }
   
   /**Compare TaskIds by first tipIds, then by task numbers. */
@@ -139,12 +108,9 @@
     TaskAttemptID that = (TaskAttemptID)o;
     int tipComp = this.taskId.compareTo(that.taskId);
     if(tipComp == 0) {
-      tipComp = this.id - that.id;
-    }
-    if (tipComp == 0) {
-      tipComp = Long.valueOf(this.jtTimestamp).compareTo(that.jtTimestamp);
+      return this.id - that.id;
     }
-    return tipComp;
+    else return tipComp;
   }
   @Override
   public String toString() { 
@@ -154,13 +120,9 @@
   }
 
   StringBuilder toStringWOPrefix() {
-    // This is just for backward compability.
-    String appendForTimestamp = (jtTimestamp == 0) 
-                                ? "" 
-                                : UNDERSCORE + String.valueOf(jtTimestamp);
     StringBuilder builder = new StringBuilder();
     return builder.append(taskId.toStringWOPrefix())
-                  .append(UNDERSCORE).append(id).append(appendForTimestamp);
+                  .append(UNDERSCORE).append(id);
   }
   
   @Override
@@ -172,14 +134,12 @@
   public void readFields(DataInput in) throws IOException {
     super.readFields(in);
     this.taskId = TaskID.read(in);
-    this.jtTimestamp = WritableUtils.readVLong(in);
   }
 
   @Override
   public void write(DataOutput out) throws IOException {
     super.write(out);
     taskId.write(out);
-    WritableUtils.writeVLong(out, jtTimestamp);
   }
   
   public static TaskAttemptID read(DataInput in) throws IOException {
@@ -197,20 +157,14 @@
       return null;
     try {
       String[] parts = str.split("_");
-      long jtTimestamp = 0;
-      // This is for backward compability
-      if(parts.length == 6 || parts.length == 7) {
+      if(parts.length == 6) {
         if(parts[0].equals(ATTEMPT)) {
           boolean isMap = false;
           if(parts[3].equals("m")) isMap = true;
           else if(parts[3].equals("r")) isMap = false;
           else throw new Exception();
-          if (parts.length == 7) {
-            jtTimestamp = Long.parseLong(parts[6]);
-          }
           return new TaskAttemptID(parts[1], Integer.parseInt(parts[2]),
-                                   isMap, Integer.parseInt(parts[4]), 
-                                   Integer.parseInt(parts[5]), jtTimestamp);
+              isMap, Integer.parseInt(parts[4]), Integer.parseInt(parts[5]));
         }
       }
     }catch (Exception ex) {//fall below
@@ -220,20 +174,6 @@
   }
   
   /** 
-   * @return a regex pattern matching TaskAttemptIDs
-   * @deprecated Use {@link #getTaskAttemptIDsPattern(String, Integer, Boolean,
-   *                                                  Integer, Integer, Long)} 
-   *             instead.
-   */
-  public static String getTaskAttemptIDsPattern(String jtIdentifier,
-      Integer jobId, Boolean isMap, Integer taskId, Integer attemptId) {
-    StringBuilder builder = new StringBuilder(ATTEMPT).append(UNDERSCORE);
-    builder.append(getTaskAttemptIDsPatternWOPrefix(jtIdentifier, jobId,
-                   isMap, taskId, attemptId, null));
-    return builder.toString();
-  }
-  
-  /**
    * Returns a regex pattern which matches task attempt IDs. Arguments can 
    * be given null, in which case that part of the regex will be generic.  
    * For example to obtain a regex matching <i>all task attempt IDs</i> 
@@ -249,23 +189,16 @@
    * @param isMap whether the tip is a map, or null 
    * @param taskId taskId number, or null
    * @param attemptId the task attempt number, or null
-   * @param jtTimestamp Timestamp that is used to identify task attempts across
-   *        jobtracker restarts. Make sure that timestamp has some valid value.
+   * @return a regex pattern matching TaskAttemptIDs
    */
-  public static String getTaskAttemptIDsPattern(String jtIdentifier, 
-      Integer jobId, Boolean isMap, Integer taskId, Integer attemptId, Long jtTimestamp) {
+  public static String getTaskAttemptIDsPattern(String jtIdentifier,
+      Integer jobId, Boolean isMap, Integer taskId, Integer attemptId) {
     StringBuilder builder = new StringBuilder(ATTEMPT).append(UNDERSCORE);
     builder.append(getTaskAttemptIDsPatternWOPrefix(jtIdentifier, jobId,
-                   isMap, taskId, attemptId, jtTimestamp));
+        isMap, taskId, attemptId));
     return builder.toString();
   }
   
-  /**
-   * @deprecated 
-   * Use {@link #getTaskAttemptIDsPatternWOPrefix(String, Integer, Boolean, 
-   *                                              Integer, Integer, Long)} 
-   * instead.
-   */
   static StringBuilder getTaskAttemptIDsPatternWOPrefix(String jtIdentifier
       , Integer jobId, Boolean isMap, Integer taskId, Integer attemptId) {
     StringBuilder builder = new StringBuilder();
@@ -276,15 +209,4 @@
     return builder;
   }
   
-  static StringBuilder getTaskAttemptIDsPatternWOPrefix(String jtIdentifier, 
-      Integer jobId, Boolean isMap, Integer taskId, Integer attemptId, 
-      Long jtTimestamp) {
-    StringBuilder builder = new StringBuilder();
-    builder.append(TaskID.getTaskIDsPatternWOPrefix(jtIdentifier, jobId, isMap, taskId))
-           .append(UNDERSCORE)
-           .append(attemptId != null ? attemptId : "[0-9]*")
-           .append(UNDERSCORE)
-           .append(jtTimestamp != null ? jtTimestamp : "[0-9]*");
-    return builder;
-  }
 }
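
For reference, a short sketch of the simplified two-part id in use. Only constructors and methods visible in the patch above are assumed; the identifiers and numbers are made up:

import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapred.TaskID;

public class TwoPartIdExample {
  public static void main(String[] args) {
    TaskID tip = new TaskID("200707121733", 3, true, 5);
    TaskAttemptID attempt = new TaskAttemptID(tip, 0);
    // Prints attempt_200707121733_0003_m_000005_0 -- no trailing
    // jobtracker timestamp any more.
    System.out.println(attempt);
    // forName() now only parses the 6-part form; ids carrying the old
    // timestamp suffix (7 parts) are no longer accepted.
    System.out.println(TaskAttemptID.forName(attempt.toString()).equals(attempt)); // true
  }
}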

Modified: hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java (original)
+++ hadoop/core/trunk/src/mapred/org/apache/hadoop/mapred/TaskInProgress.java Sun Oct  5 22:38:50 2008
@@ -54,6 +54,7 @@
   int maxTaskAttempts = 4;    
   static final double SPECULATIVE_GAP = 0.2;
   static final long SPECULATIVE_LAG = 60 * 1000;
+  private static final int NUM_ATTEMPTS_PER_RESTART = 1000;
 
   public static final Log LOG = LogFactory.getLog(TaskInProgress.class);
 
@@ -817,7 +818,9 @@
     // Create the 'taskid'; do not count the 'killed' tasks against the job!
     TaskAttemptID taskid = null;
     if (nextTaskId < (MAX_TASK_EXECS + maxTaskAttempts + numKilledTasks)) {
-      taskid = new TaskAttemptID( id, nextTaskId, jobtracker.getStartTime());
+      // Make sure that the attempts are unqiue across restarts
+      int attemptId = job.numRestarts() * NUM_ATTEMPTS_PER_RESTART + nextTaskId;
+      taskid = new TaskAttemptID( id, attemptId);
       ++nextTaskId;
     } else {
       LOG.warn("Exceeded limit of " + (MAX_TASK_EXECS + maxTaskAttempts) +

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/MiniMRCluster.java Sun Oct  5 22:38:50 2008
@@ -84,10 +84,6 @@
       return tracker;
     }
     
-    public TaskAttemptID getTaskAttemptId(TaskID taskId, int attemptId) {
-      return new TaskAttemptID(taskId, attemptId, tracker.getStartTime());
-    }
-
     /**
      * Create the job tracker and run it.
      */
@@ -301,10 +297,6 @@
     return createJobConf(new JobConf());
   }
 
-  public TaskAttemptID getTaskAttemptId(TaskID taskId, int attemptId) {
-    return this.jobTracker.getTaskAttemptId(taskId, attemptId);
-  }
-
   public JobConf createJobConf(JobConf conf) {
     if(conf == null) {
       conf = new JobConf();
@@ -516,9 +508,8 @@
          getMapTaskCompletionEventsUpdates(int index, JobID jobId, int max) 
   throws IOException {
     String jtId = jobTracker.getJobTracker().getTrackerIdentifier();
-    long jtStart = jobTracker.getJobTracker().getStartTime();
     TaskAttemptID dummy = 
-      new TaskAttemptID(jtId, jobId.getId(), false, 0, 0, jtStart);
+      new TaskAttemptID(jtId, jobId.getId(), false, 0, 0);
     return taskTrackerList.get(index).getTaskTracker()
                                      .getMapCompletionEvents(jobId, 0, max, 
                                                              dummy);

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRMapRedDebugScript.java Sun Oct  5 22:38:50 2008
@@ -163,7 +163,7 @@
 
     JobID jobId = job.getID();
     // construct the task id of first map task of failmap
-    TaskAttemptID taskId = mr.getTaskAttemptId(new TaskID(jobId,true, 0), 0);
+    TaskAttemptID taskId = new TaskAttemptID(new TaskID(jobId,true, 0), 0);
     // wait for the job to finish.
     while (!job.isComplete()) ;
     

Modified: hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java
URL: http://svn.apache.org/viewvc/hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java?rev=701948&r1=701947&r2=701948&view=diff
==============================================================================
--- hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java (original)
+++ hadoop/core/trunk/src/test/org/apache/hadoop/mapred/TestMiniMRWithDFS.java Sun Oct  5 22:38:50 2008
@@ -182,7 +182,7 @@
     // Run a word count example
     // Keeping tasks that match this pattern
     String pattern = 
-      TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null, null);
+      TaskAttemptID.getTaskAttemptIDsPattern(null, null, true, 1, null);
     jobConf.setKeepTaskFilesPattern(pattern);
     TestResult result;
     final Path inDir = new Path("./wc/input");
@@ -192,9 +192,9 @@
     assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
                  "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", result.output);
     JobID jobid = result.job.getID();
-    String taskId = mr.getTaskAttemptId(new TaskID(jobid, true, 1), 0).toString();
+    TaskAttemptID taskid = new TaskAttemptID(new TaskID(jobid, true, 1),0);
     checkTaskDirectories(mr, new String[]{jobid.toString()}, 
-                         new String[]{taskId});
+                         new String[]{taskid.toString()});
     // test with maps=0
     jobConf = mr.createJobConf();
     input = "owen is oom";