Posted to mapreduce-commits@hadoop.apache.org by ac...@apache.org on 2011/04/01 00:23:34 UTC

svn commit: r1087462 [4/20] - in /hadoop/mapreduce/branches/MR-279: ./ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ mr-client/h...

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/StartEndTimesBase.java Thu Mar 31 22:23:22 2011
@@ -29,11 +29,11 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 
 abstract class StartEndTimesBase implements TaskRuntimeEstimator {
   static final float MINIMUM_COMPLETE_PROPORTION_TO_SPECULATE
@@ -44,8 +44,8 @@ abstract class StartEndTimesBase impleme
   protected Configuration conf = null;
   protected AppContext context = null;
 
-  protected final Map<TaskAttemptID, Long> startTimes
-      = new ConcurrentHashMap<TaskAttemptID, Long>();
+  protected final Map<TaskAttemptId, Long> startTimes
+      = new ConcurrentHashMap<TaskAttemptId, Long>();
 
   // XXXX This class design assumes that the contents of AppContext.getAllJobs
   //   never changes.  Is that right?
@@ -71,7 +71,7 @@ abstract class StartEndTimesBase impleme
   }
 
   @Override
-  public long attemptEnrolledTime(TaskAttemptID attemptID) {
+  public long attemptEnrolledTime(TaskAttemptId attemptID) {
     Long result = startTimes.get(attemptID);
 
     return result == null ? Long.MAX_VALUE : result;
@@ -83,9 +83,9 @@ abstract class StartEndTimesBase impleme
     this.conf = conf;
     this.context = context;
 
-    Map<JobID, Job> allJobs = context.getAllJobs();
+    Map<JobId, Job> allJobs = context.getAllJobs();
 
-    for (JobID jobID : allJobs.keySet()) {
+    for (JobId jobID : allJobs.keySet()) {
       final Job job = allJobs.get(jobID);
       mapperStatistics.put(job, new DataStatistics());
       reducerStatistics.put(job, new DataStatistics());
@@ -94,8 +94,8 @@ abstract class StartEndTimesBase impleme
     }
   }
 
-  protected DataStatistics dataStatisticsForTask(TaskID taskID) {
-    JobID jobID = taskID.jobID;
+  protected DataStatistics dataStatisticsForTask(TaskId taskID) {
+    JobId jobID = taskID.getJobId();
     Job job = context.getJob(jobID);
 
     if (job == null) {
@@ -116,11 +116,11 @@ abstract class StartEndTimesBase impleme
   }
 
   @Override
-  public long thresholdRuntime(TaskID taskID) {
-    JobID jobID = taskID.jobID;
+  public long thresholdRuntime(TaskId taskID) {
+    JobId jobID = taskID.getJobId();
     Job job = context.getJob(jobID);
 
-    TaskType type = taskID.taskType;
+    TaskType type = taskID.getTaskType();
 
     DataStatistics statistics
         = dataStatisticsForTask(taskID);
@@ -147,7 +147,7 @@ abstract class StartEndTimesBase impleme
   }
 
   @Override
-  public long estimatedNewAttemptRuntime(TaskID id) {
+  public long estimatedNewAttemptRuntime(TaskId id) {
     DataStatistics statistics = dataStatisticsForTask(id);
 
     if (statistics == null) {
@@ -161,9 +161,9 @@ abstract class StartEndTimesBase impleme
   public void updateAttempt(TaskAttemptStatus status, long timestamp) {
     String stateString = status.stateString.toString();
 
-    TaskAttemptID attemptID = status.id;
-    TaskID taskID = attemptID.taskID;
-    JobID jobID = taskID.jobID;
+    TaskAttemptId attemptID = status.id;
+    TaskId taskID = attemptID.getTaskId();
+    JobId jobID = taskID.getJobId();
     Job job = context.getJob(jobID);
 
     if (job == null) {

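For reference, the pattern this commit applies throughout: the old org.apache.hadoop.mapreduce.v2.api id classes exposed public fields (attemptID.taskID, taskID.jobID), while the new v2.api.records types are beans read through getters. A minimal sketch, using only the accessors visible in the hunks above (the helper class and method names are invented for illustration):

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskId;

    // Sketch only, not part of the commit: resolving an attempt's job now
    // walks bean-style getters instead of the old public fields.
    class IdNavigationSketch {
      static JobId jobOf(TaskAttemptId attemptID) {
        TaskId taskID = attemptID.getTaskId(); // was: attemptID.taskID
        return taskID.getJobId();              // was: taskID.jobID
      }
    }
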
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskRuntimeEstimator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskRuntimeEstimator.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskRuntimeEstimator.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskRuntimeEstimator.java Thu Mar 31 22:23:22 2011
@@ -19,17 +19,17 @@
 package org.apache.hadoop.mapreduce.v2.app.speculate;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
 
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
 
 
 public interface TaskRuntimeEstimator {
   public void enrollAttempt(TaskAttemptStatus reportedStatus, long timestamp);
 
-  public long attemptEnrolledTime(TaskAttemptID attemptID);
+  public long attemptEnrolledTime(TaskAttemptId attemptID);
 
   public void updateAttempt(TaskAttemptStatus reportedStatus, long timestamp);
 
@@ -50,7 +50,7 @@ public interface TaskRuntimeEstimator {
    *         however long.
    *
    */
-  public long thresholdRuntime(TaskID id);
+  public long thresholdRuntime(TaskId id);
 
   /**
    *
@@ -62,7 +62,7 @@ public interface TaskRuntimeEstimator {
    *         we don't have enough information yet to produce an estimate.
    *
    */
-  public long estimatedRuntime(TaskAttemptID id);
+  public long estimatedRuntime(TaskAttemptId id);
 
   /**
    *
@@ -74,7 +74,7 @@ public interface TaskRuntimeEstimator {
    *         we don't have enough information yet to produce an estimate.
    *
    */
-  public long estimatedNewAttemptRuntime(TaskID id);
+  public long estimatedNewAttemptRuntime(TaskId id);
 
   /**
    *
@@ -86,5 +86,5 @@ public interface TaskRuntimeEstimator {
    *         we don't have enough information yet to produce an estimate.
    *
    */
-  public long runtimeEstimateVariance(TaskAttemptID id);
+  public long runtimeEstimateVariance(TaskAttemptId id);
 }

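A sketch of how a speculator might consult this interface after the rename; the decision rule shown (estimate exceeding threshold) is a plausible reading of the javadoc above, not code from this commit:

    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
    import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator;

    // Sketch only: probe the estimator with the new record id types.
    class EstimatorCallerSketch {
      static boolean worthSpeculating(TaskRuntimeEstimator estimator,
          TaskId taskID, TaskAttemptId runningAttempt) {
        long threshold = estimator.thresholdRuntime(taskID);
        long estimate = estimator.estimatedRuntime(runningAttempt);
        return estimate > threshold; // hypothetical rule, for illustration
      }
    }
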
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskSpeculationPredicate.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskSpeculationPredicate.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskSpeculationPredicate.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/TaskSpeculationPredicate.java Thu Mar 31 22:23:22 2011
@@ -18,19 +18,20 @@
 
 package org.apache.hadoop.mapreduce.v2.app.speculate;
 
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
+
 
 public class TaskSpeculationPredicate {
-  boolean canSpeculate(AppContext context, TaskID taskID) {
+  boolean canSpeculate(AppContext context, TaskId taskID) {
     // This class rejects speculating any task that already has speculations,
     //  or isn't running.
     //  Subclasses should call TaskSpeculationPredicate.canSpeculate(...) , but
     //  can be even more restrictive.
-    JobID jobID = taskID.jobID;
+    JobId jobID = taskID.getJobId();
     Job job = context.getJob(jobID);
     Task task = job.getTask(taskID);
     return task.getAttempts().size() == 1;

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/taskclean/TaskCleanupEvent.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/taskclean/TaskCleanupEvent.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/taskclean/TaskCleanupEvent.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/taskclean/TaskCleanupEvent.java Thu Mar 31 22:23:22 2011
@@ -20,8 +20,8 @@ package org.apache.hadoop.mapreduce.v2.a
 
 import org.apache.hadoop.mapreduce.OutputCommitter;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.yarn.event.AbstractEvent;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
 
 /**
  * This class encapsulates task cleanup event.
@@ -29,11 +29,11 @@ import org.apache.hadoop.mapreduce.v2.ap
  */
 public class TaskCleanupEvent extends AbstractEvent<TaskCleaner.EventType> {
 
-  private final TaskAttemptID attemptID;
+  private final TaskAttemptId attemptID;
   private final OutputCommitter committer;
   private final TaskAttemptContext attemptContext;
 
-  public TaskCleanupEvent(TaskAttemptID attemptID, OutputCommitter committer, 
+  public TaskCleanupEvent(TaskAttemptId attemptID, OutputCommitter committer, 
       TaskAttemptContext attemptContext) {
     super(TaskCleaner.EventType.TASK_CLEAN);
     this.attemptID = attemptID;
@@ -41,7 +41,7 @@ public class TaskCleanupEvent extends Ab
     this.attemptContext = attemptContext;
   }
 
-  public TaskAttemptID getAttemptID() {
+  public TaskAttemptId getAttemptID() {
     return attemptID;
   }
 

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java Thu Mar 31 22:23:22 2011
@@ -18,20 +18,20 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+
 import java.util.Locale;
-import org.apache.commons.lang.StringUtils;
-import com.google.inject.Inject;
 
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.webapp.Controller;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import com.google.inject.Inject;
 
 public class AppController extends Controller implements AMParams {
   final App app;
@@ -109,7 +109,7 @@ public class AppController extends Contr
       if ($(JOB_ID).isEmpty()) {
         throw new RuntimeException("missing job ID");
       }
-      JobID jobID = MRApps.toJobID($(JOB_ID));
+      JobId jobID = MRApps.toJobID($(JOB_ID));
       app.job = app.context.getJob(jobID);
       if (app.job == null) {
         notFound($(JOB_ID));
@@ -124,10 +124,10 @@ public class AppController extends Contr
       if ($(TASK_ID).isEmpty()) {
         throw new RuntimeException("missing task ID");
       }
-      TaskID taskID = MRApps.toTaskID($(TASK_ID));
-      app.job = app.context.getJob(taskID.jobID);
+      TaskId taskID = MRApps.toTaskID($(TASK_ID));
+      app.job = app.context.getJob(taskID.getJobId());
       if (app.job == null) {
-        notFound(MRApps.toString(taskID.jobID));
+        notFound(MRApps.toString(taskID.getJobId()));
       } else {
         app.task = app.job.getTask(taskID);
         if (app.task == null) {

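The controller now parses request parameters into the new record ids via MRApps and navigates them with getters. A minimal sketch built only from the calls visible above (class and method names invented):

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
    import org.apache.hadoop.mapreduce.v2.util.MRApps;

    // Sketch only: from a task-id request parameter to the owning JobId.
    class ParamSketch {
      static JobId jobIdOfTaskParam(String taskParam) {
        TaskId taskID = MRApps.toTaskID(taskParam);
        return taskID.getJobId(); // was: taskID.jobID
      }
    }
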
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/CountersBlock.java Thu Mar 31 22:23:22 2011
@@ -21,6 +21,11 @@ package org.apache.hadoop.mapreduce.v2.a
 import com.google.inject.Inject;
 import java.util.Map;
 
+import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -29,11 +34,6 @@ import org.apache.hadoop.mapreduce.v2.ut
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.mapreduce.v2.api.Counter;
-import org.apache.hadoop.mapreduce.v2.api.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.Counters;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
 
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
@@ -70,17 +70,17 @@ public class CountersBlock extends HtmlB
             th(".group.ui-state-default", "Counter Group").
             th(".ui-state-default", "Counters")._()._().
         tbody();
-    for (CounterGroup g : total.groups.values()) {
-      CounterGroup mg = map == null ? null : map.groups.get(g.name);
-      CounterGroup rg = reduce == null ? null : reduce.groups.get(g.name);
+    for (CounterGroup g : total.getAllCounterGroups().values()) {
+      CounterGroup mg = map == null ? null : map.getCounterGroup(g.getName());
+      CounterGroup rg = reduce == null ? null : reduce.getCounterGroup(g.getName());
       ++numGroups;
       // This is mostly for demonstration :) Typically we'd introduced
       // a CounterGroup block to reduce the verbosity. OTOH, this
       // serves as an indicator of where we're in the tag hierarchy.
       TR<THEAD<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>>> groupHeadRow = tbody.
         tr().
-          th().$title(g.name.toString()).
-            _(fixGroupDisplayName(g.displayname))._().
+          th().$title(g.getName()).
+            _(fixGroupDisplayName(g.getDisplayName()))._().
           td().$class(C_TABLE).
             table(".dt-counters").
               thead().
@@ -92,20 +92,20 @@ public class CountersBlock extends HtmlB
       TBODY<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>> group = groupHeadRow.
             th(map == null ? "Value" : "Total")._()._().
         tbody();
-      for (Counter counter : g.counters.values()) {
+      for (Counter counter : g.getAllCounters().values()) {
         // Ditto
         TR<TBODY<TABLE<TD<TR<TBODY<TABLE<DIV<Hamlet>>>>>>>> groupRow = group.
           tr().
-            td().$title(counter.name.toString()).
-              _(counter.displayName.toString())._();
+            td().$title(counter.getName()).
+              _(counter.getDisplayName())._();
         if (map != null) {
-          Counter mc = mg == null ? null : mg.counters.get(counter.name);
-          Counter rc = rg == null ? null : rg.counters.get(counter.name);
+          Counter mc = mg == null ? null : mg.getCounter(counter.getName());
+          Counter rc = rg == null ? null : rg.getCounter(counter.getName());
           groupRow.
-            td(mc == null ? "0" : String.valueOf(mc.value)).
-            td(rc == null ? "0" : String.valueOf(rc.value));
+            td(mc == null ? "0" : String.valueOf(mc.getValue())).
+            td(rc == null ? "0" : String.valueOf(rc.getValue()));
         }
-        groupRow.td(String.valueOf(counter.value))._();
+        groupRow.td(String.valueOf(counter.getValue()))._();
       }
       group._()._()._()._();
     }
@@ -113,12 +113,12 @@ public class CountersBlock extends HtmlB
   }
 
   private void getCounters(AppContext ctx) {
-    JobID jobID = null;
-    TaskID taskID = null;
+    JobId jobID = null;
+    TaskId taskID = null;
     String tid = $(TASK_ID);
     if (!tid.isEmpty()) {
       taskID = MRApps.toTaskID(tid);
-      jobID = taskID.jobID;
+      jobID = taskID.getJobId();
     } else {
       String jid = $(JOB_ID);
       if (!jid.isEmpty()) {
@@ -141,7 +141,7 @@ public class CountersBlock extends HtmlB
       return;
     }
     // Get all types of counters
-    Map<TaskID, Task> tasks = job.getTasks();
+    Map<TaskId, Task> tasks = job.getTasks();
     total = JobImpl.newCounters();
     map = JobImpl.newCounters();
     reduce = JobImpl.newCounters();

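The counters records follow the same bean conversion: the public group/counter maps give way to getAllCounterGroups()/getAllCounters() plus per-field getters. A sketch totalling all counter values, using only the accessors exercised above (the class name is invented):

    import org.apache.hadoop.mapreduce.v2.api.records.Counter;
    import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
    import org.apache.hadoop.mapreduce.v2.api.records.Counters;

    // Sketch only: walk every group and counter through the new getters.
    class CounterWalkSketch {
      static long sumValues(Counters counters) {
        long total = 0;
        for (CounterGroup g : counters.getAllCounterGroups().values()) {
          for (Counter c : g.getAllCounters().values()) {
            total += c.getValue(); // was: c.value
          }
        }
        return total;
      }
    }
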
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java Thu Mar 31 22:23:22 2011
@@ -22,9 +22,9 @@ import com.google.inject.Inject;
 import java.util.Date;
 import java.util.Map;
 
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -54,7 +54,7 @@ public class JobBlock extends HtmlBlock 
         p()._("Sorry, can't do anything without a JobID.")._();
       return;
     }
-    JobID jobID = MRApps.toJobID(jid);
+    JobId jobID = MRApps.toJobID(jid);
     Job job = appContext.getJob(jobID);
     if (job == null) {
       html.
@@ -62,8 +62,8 @@ public class JobBlock extends HtmlBlock 
       return;
     }
     JobReport jobReport = job.getReport();
-    String mapPct = percent(jobReport.mapProgress);
-    String reducePct = percent(jobReport.reduceProgress);
+    String mapPct = percent(jobReport.getMapProgress());
+    String reducePct = percent(jobReport.getReduceProgress());
     int maps = job.getTotalMaps();
     int mapsComplete = job.getCompletedMaps();
     int reduces = job.getTotalReduces();
@@ -72,9 +72,9 @@ public class JobBlock extends HtmlBlock 
     info("Job Overview").
         _("Job Name:", job.getName()).
         _("State:", job.getState()).
-        _("Started:", new Date(jobReport.startTime)).
+        _("Started:", new Date(jobReport.getStartTime())).
         _("Elapsed:", StringUtils.formatTime(System.currentTimeMillis()
-                                             - jobReport.startTime));
+                                             - jobReport.getStartTime()));
     html.
       _(InfoBlock.class).
       div(_INFO_WRAP).
@@ -113,7 +113,7 @@ public class JobBlock extends HtmlBlock 
   }
 
   private void countTasks(Job job) {
-    Map<TaskID, Task> tasks = job.getTasks();
+    Map<TaskId, Task> tasks = job.getTasks();
     for (Task task : tasks.values()) {
       switch (task.getType()) {
         case MAP: switch (task.getState()) {

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java Thu Mar 31 22:23:22 2011
@@ -20,13 +20,13 @@ package org.apache.hadoop.mapreduce.v2.a
 
 import com.google.inject.Inject;
 
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
 
 import static org.apache.hadoop.yarn.util.StringHelper.*;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
@@ -57,16 +57,16 @@ public class JobsBlock extends HtmlBlock
     for (Job job : appContext.getAllJobs().values()) {
       String jobID = MRApps.toString(job.getID());
       JobReport report = job.getReport();
-      String mapPct = percent(report.mapProgress);
+      String mapPct = percent(report.getMapProgress());
       String mapsTotal = String.valueOf(job.getTotalMaps());
       String mapsCompleted = String.valueOf(job.getCompletedMaps());
-      String reducePct = percent(report.reduceProgress);
+      String reducePct = percent(report.getReduceProgress());
       String reduceTotal = String.valueOf(job.getTotalReduces());
       String reduceCompleted = String.valueOf(job.getCompletedReduces());
       tbody.
         tr().
           td().
-            span().$title(String.valueOf(job.getID().id))._(). // for sorting
+            span().$title(String.valueOf(job.getID().getId()))._(). // for sorting
             a(url("job", jobID), jobID)._().
           td(job.getName().toString()).
           td(job.getState().toString()).

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java Thu Mar 31 22:23:22 2011
@@ -21,13 +21,13 @@ package org.apache.hadoop.mapreduce.v2.a
 import com.google.common.base.Joiner;
 import com.google.inject.Inject;
 
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
 
 import static org.apache.hadoop.yarn.util.StringHelper.*;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
@@ -67,15 +67,15 @@ class TaskPage extends AppView {
         String progress = percent(ta.getProgress());
         String node = ta.getAssignedContainerMgrAddress();
         TaskAttemptReport report = ta.getReport();
-        long elapsed = Times.elapsed(report.startTime, report.finishTime);
+        long elapsed = Times.elapsed(report.getStartTime(), report.getFinishTime());
         tbody.
           tr().
             td(".id", taid).
             td(".progress", progress).
             td(".state", ta.getState().toString()).
             td(".node", node).
-            td(".ts", String.valueOf(report.startTime)).
-            td(".ts", String.valueOf(report.finishTime)).
+            td(".ts", String.valueOf(report.getStartTime())).
+            td(".ts", String.valueOf(report.getFinishTime())).
             td(".dt", String.valueOf(elapsed)).
             td(".note", Joiner.on('\n').join(ta.getDiagnostics()))._();
       }

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java Thu Mar 31 22:23:22 2011
@@ -22,6 +22,8 @@ import com.google.inject.Inject;
 import java.text.SimpleDateFormat;
 import java.util.Date;
 
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
@@ -29,8 +31,6 @@ import org.apache.hadoop.yarn.webapp.ham
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
-import org.apache.hadoop.mapreduce.v2.api.TaskReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
 
 import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
 import static org.apache.hadoop.yarn.util.StringHelper.*;
@@ -73,12 +73,12 @@ public class TasksBlock extends HtmlBloc
       }
       String tid = MRApps.toString(task.getID());
       TaskReport report = task.getReport();
-      String pct = percent(report.progress);
-      long elapsed = Times.elapsed(report.startTime, report.finishTime);
+      String pct = percent(report.getProgress());
+      long elapsed = Times.elapsed(report.getStartTime(), report.getFinishTime());
       tbody.
         tr().
           td().
-            br().$title(String.valueOf(task.getID().id))._(). // sorting
+            br().$title(String.valueOf(task.getID().getId()))._(). // sorting
             a(url("task", tid), tid)._().
           td().
             br().$title(pct)._().
@@ -86,13 +86,13 @@ public class TasksBlock extends HtmlBloc
               $title(join(pct, '%')). // tooltip
               div(_PROGRESSBAR_VALUE).
                 $style(join("width:", pct, '%'))._()._()._().
-          td(report.state.toString()).
+          td(report.getTaskState().toString()).
           td().
-            br().$title(String.valueOf(report.startTime))._().
-            _(dateFormat.format(new Date(report.startTime)))._().
+            br().$title(String.valueOf(report.getStartTime()))._().
+            _(dateFormat.format(new Date(report.getStartTime())))._().
           td().
-            br().$title(String.valueOf(report.finishTime))._().
-            _(dateFormat.format(new Date(report.finishTime)))._().
+            br().$title(String.valueOf(report.getFinishTime()))._().
+            _(dateFormat.format(new Date(report.getFinishTime())))._().
           td().
             br().$title(String.valueOf(elapsed))._().
             _(StringUtils.formatTime(elapsed))._()._();

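Report reads likewise move from public fields to getters. A sketch of the elapsed-time computation the web blocks above perform, inlined here rather than calling Times.elapsed (whose package this diff does not show):

    import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;

    // Sketch only: start/finish times now come through getters.
    class ReportSketch {
      static long elapsedMillis(TaskReport report) {
        return report.getFinishTime() - report.getStartTime();
      }
    }
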
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Thu Mar 31 22:23:22 2011
@@ -30,6 +30,14 @@ import org.apache.hadoop.mapred.WrappedJ
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
 import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.YarnMRJobConfig;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MRAppMaster;
@@ -52,19 +60,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
 import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
 import org.apache.hadoop.security.Credentials;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.state.StateMachine;
 import org.apache.hadoop.yarn.state.StateMachineFactory;
-import org.apache.hadoop.yarn.ApplicationID;
-import org.apache.hadoop.yarn.ContainerID;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
+
 
 /**
  * Mock MRAppMaster. Doesn't start RPC servers.
@@ -75,11 +78,14 @@ public class MRApp extends MRAppMaster {
   int maps;
   int reduces;
 
+  private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+  
   //if true tasks complete automatically as soon as they are launched
   protected boolean autoComplete = false;
 
   public MRApp(int maps, int reduces, boolean autoComplete) {
-    super(new ApplicationID());
+    
+    super(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class));
     this.maps = maps;
     this.reduces = reduces;
     this.autoComplete = autoComplete;
@@ -102,49 +108,49 @@ public class MRApp extends MRAppMaster {
       TaskAttemptState finalState) throws Exception {
     int timeoutSecs = 0;
     TaskAttemptReport report = attempt.getReport();
-    while (!finalState.equals(report.state) &&
+    while (!finalState.equals(report.getTaskAttemptState()) &&
         timeoutSecs++ < 20) {
-      System.out.println("TaskAttempt State is : " + report.state +
+      System.out.println("TaskAttempt State is : " + report.getTaskAttemptState() +
           " Waiting for state : " + finalState +
-          "   progress : " + report.progress);
+          "   progress : " + report.getProgress());
       report = attempt.getReport();
       Thread.sleep(500);
     }
-    System.out.println("TaskAttempt State is : " + report.state);
+    System.out.println("TaskAttempt State is : " + report.getTaskAttemptState());
     Assert.assertEquals("TaskAttempt state is not correct (timedout)",
         finalState, 
-        report.state);
+        report.getTaskAttemptState());
   }
 
   public void waitForState(Task task, TaskState finalState) throws Exception {
     int timeoutSecs = 0;
     TaskReport report = task.getReport();
-    while (!finalState.equals(report.state) &&
+    while (!finalState.equals(report.getTaskState()) &&
         timeoutSecs++ < 20) {
-      System.out.println("Task State is : " + report.state +
+      System.out.println("Task State is : " + report.getTaskState() +
           " Waiting for state : " + finalState +
-          "   progress : " + report.progress);
+          "   progress : " + report.getProgress());
       report = task.getReport();
       Thread.sleep(500);
     }
-    System.out.println("Task State is : " + report.state);
+    System.out.println("Task State is : " + report.getTaskState());
     Assert.assertEquals("Task state is not correct (timedout)", finalState, 
-        report.state);
+        report.getTaskState());
   }
 
   public void waitForState(Job job, JobState finalState) throws Exception {
     int timeoutSecs = 0;
     JobReport report = job.getReport();
-    while (!finalState.equals(report.state) &&
+    while (!finalState.equals(report.getJobState()) &&
         timeoutSecs++ < 20) {
-      System.out.println("Job State is : " + report.state +
+      System.out.println("Job State is : " + report.getJobState() +
           " Waiting for state : " + finalState +
-          "   map progress : " + report.mapProgress + 
-          "   reduce progress : " + report.reduceProgress);
+          "   map progress : " + report.getMapProgress() + 
+          "   reduce progress : " + report.getReduceProgress());
       report = job.getReport();
       Thread.sleep(500);
     }
-    System.out.println("Job State is : " + report.state);
+    System.out.println("Job State is : " + report.getJobState());
     Assert.assertEquals("Job state is not correct (timedout)", finalState, 
         job.getState());
   }
@@ -153,17 +159,17 @@ public class MRApp extends MRAppMaster {
     for (Job job : getContext().getAllJobs().values()) {
       JobReport jobReport = job.getReport();
       Assert.assertTrue("Job start time is  not less than finish time",
-          jobReport.startTime < jobReport.finishTime);
+          jobReport.getStartTime() < jobReport.getFinishTime());
       Assert.assertTrue("Job finish time is in future",
-          jobReport.finishTime < System.currentTimeMillis());
+          jobReport.getFinishTime() < System.currentTimeMillis());
       for (Task task : job.getTasks().values()) {
         TaskReport taskReport = task.getReport();
         Assert.assertTrue("Task start time is  not less than finish time",
-            taskReport.startTime < taskReport.finishTime);
+            taskReport.getStartTime() < taskReport.getFinishTime());
         for (TaskAttempt attempt : task.getAttempts().values()) {
           TaskAttemptReport attemptReport = attempt.getReport();
           Assert.assertTrue("Attempt start time is  not less than finish time",
-              attemptReport.startTime < attemptReport.finishTime);
+              attemptReport.getStartTime() < attemptReport.getFinishTime());
         }
       }
     }
@@ -198,10 +204,10 @@ public class MRApp extends MRAppMaster {
         return null;
       }
       @Override
-      public void register(TaskAttemptID attemptID, 
+      public void register(TaskAttemptId attemptID, 
           org.apache.hadoop.mapred.Task task, WrappedJvmID jvmID) {}
       @Override
-      public void unregister(TaskAttemptID attemptID, WrappedJvmID jvmID) {
+      public void unregister(TaskAttemptId attemptID, WrappedJvmID jvmID) {
       }
     };
   }
@@ -231,7 +237,7 @@ public class MRApp extends MRAppMaster {
     }
   }
 
-  protected void attemptLaunched(TaskAttemptID attemptID) {
+  protected void attemptLaunched(TaskAttemptId attemptID) {
     if (autoComplete) {
       // send the done event
       getContext().getEventHandler().handle(
@@ -247,9 +253,9 @@ public class MRApp extends MRAppMaster {
       private int containerCount;
       @Override
       public void handle(ContainerAllocatorEvent event) {
-        ContainerID cId = new ContainerID();
-        cId.appID = getContext().getApplicationID();
-        cId.id = containerCount++;
+        ContainerId cId = recordFactory.newRecordInstance(ContainerId.class);
+        cId.setAppId(getContext().getApplicationID());
+        cId.setId(containerCount++);
         getContext().getEventHandler().handle(
             new TaskAttemptContainerAssignedEvent(event.getAttemptID(), cId,
                 "dummy", null));
@@ -304,7 +310,7 @@ public class MRApp extends MRAppMaster {
       return localStateMachine;
     }
 
-    public TestJob(ApplicationID appID, EventHandler eventHandler,
+    public TestJob(ApplicationId appID, EventHandler eventHandler,
         TaskAttemptListener taskAttemptListener) {
       super(appID, new Configuration(), eventHandler, taskAttemptListener,
           new JobTokenSecretManager(), new Credentials());
@@ -333,7 +339,7 @@ public class MRApp extends MRAppMaster {
       job.remoteJobConfFile = new Path("test");
     }
     @Override
-    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobID jobId) {
+    protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
       TaskSplitMetaInfo[] splits = new TaskSplitMetaInfo[maps];
       for (int i = 0; i < maps ; i++) {
         splits[i] = new TaskSplitMetaInfo();

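Construction changes shape too: records are no longer instantiated with `new` but obtained from a RecordFactory and populated through setters, exactly as the MRApp hunks above do for ApplicationId and ContainerId. A self-contained sketch of that pattern (the wrapper class is invented for the example):

    import org.apache.hadoop.yarn.api.records.ApplicationId;
    import org.apache.hadoop.yarn.api.records.ContainerId;
    import org.apache.hadoop.yarn.factories.RecordFactory;
    import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

    // Sketch only: factory-created record wired up via setters.
    class RecordFactorySketch {
      private static final RecordFactory recordFactory =
          RecordFactoryProvider.getRecordFactory(null);

      static ContainerId newContainerId(ApplicationId appID, int id) {
        ContainerId cId = recordFactory.newRecordInstance(ContainerId.class);
        cId.setAppId(appID); // was: cId.appID = appID
        cId.setId(id);       // was: cId.id = id
        return cId;
      }
    }
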
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java Thu Mar 31 22:23:22 2011
@@ -22,6 +22,8 @@ import java.util.concurrent.BlockingQueu
 import java.util.concurrent.LinkedBlockingQueue;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -29,13 +31,12 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.service.AbstractService;
 import org.apache.log4j.Level;
 import org.apache.log4j.LogManager;
 import org.apache.log4j.Logger;
-import org.apache.hadoop.yarn.ContainerID;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
 
 public class MRAppBenchmark {
 
@@ -47,7 +48,7 @@ public class MRAppBenchmark {
     rootLogger.setLevel(Level.WARN);
     long startTime = System.currentTimeMillis();
     Job job = app.submit(new Configuration());
-    while (!job.getReport().state.equals(JobState.SUCCEEDED)) {
+    while (!job.getReport().getJobState().equals(JobState.SUCCEEDED)) {
       printStat(job, startTime);
       Thread.sleep(2000);
     }
@@ -80,7 +81,7 @@ public class MRAppBenchmark {
     }
     
     @Override
-    protected void attemptLaunched(TaskAttemptID attemptID) {
+    protected void attemptLaunched(TaskAttemptId attemptID) {
       super.attemptLaunched(attemptID);
       //the task is launched and sends done immediately
       concurrentRunningTasks--;
@@ -119,9 +120,9 @@ public class MRAppBenchmark {
               try {
                 if (concurrentRunningTasks < maxConcurrentRunningTasks) {
                   event = eventQueue.take();
-                  ContainerID cId = new ContainerID();
-                  cId.appID = getContext().getApplicationID();
-                  cId.id = containerCount++;
+                  ContainerId cId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ContainerId.class);
+                  cId.setAppId(getContext().getApplicationID());
+                  cId.setId(containerCount++);
                   //System.out.println("Allocating " + containerCount);
                   getContext().getEventHandler().handle(
                       new TaskAttemptContainerAssignedEvent(event

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java Thu Mar 31 22:23:22 2011
@@ -28,27 +28,30 @@ import java.util.Map;
 import org.apache.hadoop.mapreduce.FileSystemCounter;
 import org.apache.hadoop.mapreduce.JobCounter;
 import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.Phase;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
 import org.apache.hadoop.yarn.MockApps;
-import org.apache.hadoop.yarn.ApplicationID;
-import org.apache.hadoop.yarn.ContainerID;
-import org.apache.hadoop.mapreduce.v2.api.Counters;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.Phase;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+
 
 public class MockJobs extends MockApps {
   static final Iterator<JobState> JOB_STATES = Iterators.cycle(
@@ -76,15 +79,16 @@ public class MockJobs extends MockApps {
   static final Iterator<String> DIAGS = Iterators.cycle(
       "Error: java.lang.OutOfMemoryError: Java heap space",
       "Lost task tracker: tasktracker.domain/127.0.0.1:40879");
+  static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 
   public static String newJobName() {
     return newAppName();
   }
 
-  public static Map<JobID, Job> newJobs(ApplicationID appID, int numJobsPerApp,
+  public static Map<JobId, Job> newJobs(ApplicationId appID, int numJobsPerApp,
                                         int numTasksPerJob,
                                         int numAttemptsPerTask) {
-    Map<JobID, Job> map = Maps.newHashMap();
+    Map<JobId, Job> map = Maps.newHashMap();
     for (int j = 0; j < numJobsPerApp; ++j) {
       Job job = newJob(appID, j, numTasksPerJob, numAttemptsPerTask);
       map.put(job.getID(), job);
@@ -92,50 +96,44 @@ public class MockJobs extends MockApps {
     return map;
   }
 
-  public static JobID newJobID(ApplicationID appID, int i) {
-    JobID id = new JobID();
-    id.appID = appID;
-    id.id = i;
+  public static JobId newJobID(ApplicationId appID, int i) {
+    JobId id = recordFactory.newRecordInstance(JobId.class);
+    id.setAppId(appID);
+    id.setId(i);
     return id;
   }
 
-  public static JobReport newJobReport(JobID id) {
-    JobReport report = new JobReport();
-    report.id = id;
-    report.startTime = System.currentTimeMillis()
-        - (int)(Math.random() * 1000000);
-    report.finishTime = System.currentTimeMillis()
-        + (int)(Math.random() * 1000000) + 1;
-    report.mapProgress = (float)Math.random();
-    report.reduceProgress = (float)Math.random();
-    report.state = JOB_STATES.next();
+  public static JobReport newJobReport(JobId id) {
+    JobReport report = recordFactory.newRecordInstance(JobReport.class);
+    report.setJobId(id);
+    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * 1000000));
+    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * 1000000) + 1);
+    report.setMapProgress((float)Math.random());
+    report.setReduceProgress((float)Math.random());
+    report.setJobState(JOB_STATES.next());
     return report;
   }
 
-  public static TaskReport newTaskReport(TaskID id) {
-    TaskReport report = new TaskReport();
-    report.id = id;
-    report.startTime = System.currentTimeMillis()
-        - (int)(Math.random() * 1000000);
-    report.finishTime = System.currentTimeMillis()
-        + (int)(Math.random() * 1000000) + 1;
-    report.progress = (float)Math.random();
-    report.counters = newCounters();
-    report.state = TASK_STATES.next();
+  public static TaskReport newTaskReport(TaskId id) {
+    TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
+    report.setTaskId(id);
+    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * 1000000));
+    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * 1000000) + 1);
+    report.setProgress((float)Math.random());
+    report.setCounters(newCounters());
+    report.setTaskState(TASK_STATES.next());
     return report;
   }
 
-  public static TaskAttemptReport newTaskAttemptReport(TaskAttemptID id) {
-    TaskAttemptReport report = new TaskAttemptReport();
-    report.id = id;
-    report.startTime = System.currentTimeMillis()
-        - (int)(Math.random() * 1000000);
-    report.finishTime = System.currentTimeMillis()
-        + (int)(Math.random() * 1000000) + 1;
-    report.phase = PHASES.next();
-    report.state = TASK_ATTEMPT_STATES.next();
-    report.progress = (float)Math.random();
-    report.counters = newCounters();
+  public static TaskAttemptReport newTaskAttemptReport(TaskAttemptId id) {
+    TaskAttemptReport report = recordFactory.newRecordInstance(TaskAttemptReport.class);
+    report.setTaskAttemptId(id);
+    report.setStartTime(System.currentTimeMillis() - (int)(Math.random() * 1000000));
+    report.setFinishTime(System.currentTimeMillis() + (int)(Math.random() * 1000000) + 1);
+    report.setPhase(PHASES.next());
+    report.setTaskAttemptState(TASK_ATTEMPT_STATES.next());
+    report.setProgress((float)Math.random());
+    report.setCounters(newCounters());
     return report;
   }
 
@@ -163,9 +161,9 @@ public class MockJobs extends MockApps {
     return TypeConverter.toYarn(hc);
   }
 
-  public static Map<TaskAttemptID, TaskAttempt> newTaskAttempts(TaskID tid,
+  public static Map<TaskAttemptId, TaskAttempt> newTaskAttempts(TaskId tid,
                                                                 int m) {
-    Map<TaskAttemptID, TaskAttempt> map = Maps.newHashMap();
+    Map<TaskAttemptId, TaskAttempt> map = Maps.newHashMap();
     for (int i = 0; i < m; ++i) {
       TaskAttempt ta = newTaskAttempt(tid, i);
       map.put(ta.getID(), ta);
@@ -173,16 +171,16 @@ public class MockJobs extends MockApps {
     return map;
   }
 
-  public static TaskAttempt newTaskAttempt(TaskID tid, int i) {
-    final TaskAttemptID taid = new TaskAttemptID();
-    taid.taskID = tid;
-    taid.id = i;
+  public static TaskAttempt newTaskAttempt(TaskId tid, int i) {
+    final TaskAttemptId taid = recordFactory.newRecordInstance(TaskAttemptId.class);
+    taid.setTaskId(tid);
+    taid.setId(i);
     final TaskAttemptReport report = newTaskAttemptReport(taid);
-    final List<CharSequence> diags = Lists.newArrayList();
+    final List<String> diags = Lists.newArrayList();
     diags.add(DIAGS.next());
     return new TaskAttempt() {
       @Override
-      public TaskAttemptID getID() {
+      public TaskAttemptId getID() {
         return taid;
       }
 
@@ -203,22 +201,22 @@ public class MockJobs extends MockApps {
 
       @Override
       public Counters getCounters() {
-        return report.counters;
+        return report.getCounters();
       }
 
       @Override
       public float getProgress() {
-        return report.progress;
+        return report.getProgress();
       }
 
       @Override
       public TaskAttemptState getState() {
-        return report.state;
+        return report.getTaskAttemptState();
       }
 
       @Override
       public boolean isFinished() {
-        switch (report.state) {
+        switch (report.getTaskAttemptState()) {
           case SUCCEEDED:
           case FAILED:
           case KILLED: return true;
@@ -227,9 +225,9 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public ContainerID getAssignedContainerID() {
-        ContainerID id = new ContainerID();
-        id.appID = taid.taskID.jobID.appID;
+      public ContainerId getAssignedContainerID() {
+        ContainerId id = recordFactory.newRecordInstance(ContainerId.class);
+        id.setAppId(taid.getTaskId().getJobId().getAppId());
         return id;
       }
 
@@ -239,14 +237,14 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public List<CharSequence> getDiagnostics() {
+      public List<String> getDiagnostics() {
         return diags;
       }
     };
   }
 
-  public static Map<TaskID, Task> newTasks(JobID jid, int n, int m) {
-    Map<TaskID, Task> map = Maps.newHashMap();
+  public static Map<TaskId, Task> newTasks(JobId jid, int n, int m) {
+    Map<TaskId, Task> map = Maps.newHashMap();
     for (int i = 0; i < n; ++i) {
       Task task = newTask(jid, i, m);
       map.put(task.getID(), task);
@@ -254,16 +252,16 @@ public class MockJobs extends MockApps {
     return map;
   }
 
-  public static Task newTask(JobID jid, int i, int m) {
-    final TaskID tid = new TaskID();
-    tid.jobID = jid;
-    tid.id = i;
-    tid.taskType = TASK_TYPES.next();
+  public static Task newTask(JobId jid, int i, int m) {
+    final TaskId tid = recordFactory.newRecordInstance(TaskId.class);
+    tid.setJobId(jid);
+    tid.setId(i);
+    tid.setTaskType(TASK_TYPES.next());
     final TaskReport report = newTaskReport(tid);
-    final Map<TaskAttemptID, TaskAttempt> attempts = newTaskAttempts(tid, m);
+    final Map<TaskAttemptId, TaskAttempt> attempts = newTaskAttempts(tid, m);
     return new Task() {
       @Override
-      public TaskID getID() {
+      public TaskId getID() {
         return tid;
       }
 
@@ -274,32 +272,32 @@ public class MockJobs extends MockApps {
 
       @Override
       public Counters getCounters() {
-        return report.counters;
+        return report.getCounters();
       }
 
       @Override
       public float getProgress() {
-        return report.progress;
+        return report.getProgress();
       }
 
       @Override
       public TaskType getType() {
-        return tid.taskType;
+        return tid.getTaskType();
       }
 
       @Override
-      public Map<TaskAttemptID, TaskAttempt> getAttempts() {
+      public Map<TaskAttemptId, TaskAttempt> getAttempts() {
         return attempts;
       }
 
       @Override
-      public TaskAttempt getAttempt(TaskAttemptID attemptID) {
+      public TaskAttempt getAttempt(TaskAttemptId attemptID) {
         return attempts.get(attemptID);
       }
 
       @Override
       public boolean isFinished() {
-        switch (report.state) {
+        switch (report.getTaskState()) {
           case SUCCEEDED:
           case KILLED:
           case FAILED: return true;
@@ -308,13 +306,13 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public boolean canCommit(TaskAttemptID taskAttemptID) {
+      public boolean canCommit(TaskAttemptId taskAttemptID) {
         return false;
       }
 
       @Override
       public TaskState getState() {
-        return report.state;
+        return report.getTaskState();
       }
     };
   }
@@ -355,27 +353,27 @@ public class MockJobs extends MockApps {
     return tc;
   }
 
-  public static Job newJob(ApplicationID appID, int i, int n, int m) {
-    final JobID id = newJobID(appID, i);
+  public static Job newJob(ApplicationId appID, int i, int n, int m) {
+    final JobId id = newJobID(appID, i);
     final String name = newJobName();
     final JobReport report = newJobReport(id);
-    final Map<TaskID, Task> tasks = newTasks(id, n, m);
+    final Map<TaskId, Task> tasks = newTasks(id, n, m);
     final TaskCount taskCount = getTaskCount(tasks.values());
     final Counters counters = getCounters(tasks.values());
     return new Job() {
       @Override
-      public JobID getID() {
+      public JobId getID() {
         return id;
       }
 
       @Override
-      public CharSequence getName() {
+      public String getName() {
         return name;
       }
 
       @Override
       public JobState getState() {
-        return report.state;
+        return report.getJobState();
       }
 
       @Override
@@ -389,12 +387,12 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public Map<TaskID, Task> getTasks() {
+      public Map<TaskId, Task> getTasks() {
         return tasks;
       }
 
       @Override
-      public Task getTask(TaskID taskID) {
+      public Task getTask(TaskId taskID) {
         return tasks.get(taskID);
       }
 
@@ -425,7 +423,7 @@ public class MockJobs extends MockApps {
       }
 
       @Override
-      public Map<TaskID, Task> getTasks(TaskType taskType) {
+      public Map<TaskId, Task> getTasks(TaskType taskType) {
         throw new UnsupportedOperationException("Not supported yet.");
       }
 

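The MockJobs changes above capture the heart of this patch: the v2 ids and reports are no longer plain classes with public fields (tid.jobID = jid) but records obtained from a RecordFactory and mutated through setters. A minimal sketch of the construction idiom, using only calls visible in this diff (the class and helper names, and the MAP default, are illustrative, not part of the patch):

    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
    import org.apache.hadoop.yarn.factories.RecordFactory;
    import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

    public class RecordIdSketch {
      private static final RecordFactory recordFactory =
          RecordFactoryProvider.getRecordFactory(null);

      // Build a TaskId the way newTask() does above: a factory-created
      // record instance mutated through setters, instead of direct
      // field assignment on a public-field struct.
      static TaskId newMapTaskId(JobId jobId, int index) {
        TaskId tid = recordFactory.newRecordInstance(TaskId.class);
        tid.setJobId(jobId);
        tid.setId(index);
        tid.setTaskType(TaskType.MAP);
        return tid;
      }
    }
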
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java Thu Mar 31 22:23:22 2011
@@ -26,6 +26,11 @@ import junit.framework.Assert;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapred.TaskAttemptListenerImpl;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.TaskAttemptListener;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -33,11 +38,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
 import org.junit.Test;
 
 /**
@@ -53,13 +53,13 @@ public class TestFail {
     MRApp app = new MockFirstFailingAttemptMRApp(1, 0);
     Job job = app.submit(new Configuration());
     app.waitForState(job, JobState.SUCCEEDED);
-    Map<TaskID,Task> tasks = job.getTasks();
+    Map<TaskId,Task> tasks = job.getTasks();
     Assert.assertEquals("No of tasks is not correct", 1, 
         tasks.size());
     Task task = tasks.values().iterator().next();
     Assert.assertEquals("Task state not correct", TaskState.SUCCEEDED, 
-        task.getReport().state);
-    Map<TaskAttemptID, TaskAttempt> attempts = 
+        task.getReport().getTaskState());
+    Map<TaskAttemptId, TaskAttempt> attempts = 
       tasks.values().iterator().next().getAttempts();
     Assert.assertEquals("No of attempts is not correct", 2, 
         attempts.size());
@@ -67,9 +67,9 @@ public class TestFail {
     //and another must have succeeded
     Iterator<TaskAttempt> it = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED, 
-          it.next().getReport().state);
+          it.next().getReport().getTaskAttemptState());
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.SUCCEEDED, 
-        it.next().getReport().state);
+        it.next().getReport().getTaskAttemptState());
   }
 
   @Test
@@ -143,19 +143,19 @@ public class TestFail {
     conf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, maxAttempts);
     Job job = app.submit(conf);
     app.waitForState(job, JobState.FAILED);
-    Map<TaskID,Task> tasks = job.getTasks();
+    Map<TaskId,Task> tasks = job.getTasks();
     Assert.assertEquals("No of tasks is not correct", 1, 
         tasks.size());
     Task task = tasks.values().iterator().next();
     Assert.assertEquals("Task state not correct", TaskState.FAILED, 
-        task.getReport().state);
-    Map<TaskAttemptID, TaskAttempt> attempts = 
+        task.getReport().getTaskState());
+    Map<TaskAttemptId, TaskAttempt> attempts = 
       tasks.values().iterator().next().getAttempts();
     Assert.assertEquals("No of attempts is not correct", maxAttempts, 
         attempts.size());
     for (TaskAttempt attempt : attempts.values()) {
       Assert.assertEquals("Attempt state not correct", TaskAttemptState.FAILED, 
-          attempt.getReport().state);
+          attempt.getReport().getTaskAttemptState());
     }
   }
 
@@ -189,8 +189,8 @@ public class TestFail {
     }
 
     @Override
-    protected void attemptLaunched(TaskAttemptID attemptID) {
-      if (attemptID.taskID.id == 0) {//check if it is first task
+    protected void attemptLaunched(TaskAttemptId attemptID) {
+      if (attemptID.getTaskId().getId() == 0) {//check if it is first task
         // send the Fail event
         getContext().getEventHandler().handle(
             new TaskAttemptEvent(attemptID, 
@@ -210,8 +210,8 @@ public class TestFail {
     }
 
     @Override
-    protected void attemptLaunched(TaskAttemptID attemptID) {
-      if (attemptID.taskID.id == 0 && attemptID.id == 0) {
+    protected void attemptLaunched(TaskAttemptId attemptID) {
+      if (attemptID.getTaskId().getId() == 0 && attemptID.getId() == 0) {
         //check if it is first task's first attempt
         // send the Fail event
         getContext().getEventHandler().handle(

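TestFail's assertions move from public report fields (report.state) to getters (getTaskState(), getTaskAttemptState()). A sketch of the resulting pattern, as a hypothetical helper that merely restates the loop at the end of the max-attempts test above:

    import junit.framework.Assert;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
    import org.apache.hadoop.mapreduce.v2.app.job.Task;
    import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;

    public class FailAssertionSketch {
      // Every attempt of the task must have failed, checked via the
      // getter-based TaskAttemptReport API introduced by this patch.
      static void assertAllAttemptsFailed(Task task) {
        for (TaskAttempt attempt : task.getAttempts().values()) {
          Assert.assertEquals("Attempt state not correct",
              TaskAttemptState.FAILED,
              attempt.getReport().getTaskAttemptState());
        }
      }
    }
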
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFetchFailure.java Thu Mar 31 22:23:22 2011
@@ -24,18 +24,18 @@ import java.util.Iterator;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEventStatus;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
 import org.apache.hadoop.mapreduce.v2.app.job.event.JobTaskAttemptFetchFailureEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEventStatus;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
 import org.junit.Test;
 
 public class TestFetchFailure {
@@ -71,7 +71,7 @@ public class TestFetchFailure {
     Assert.assertEquals("No of completion events not correct",
         1, events.length);
     Assert.assertEquals("Event status not correct",
-        TaskAttemptCompletionEventStatus.SUCCEEDED, events[0].status);
+        TaskAttemptCompletionEventStatus.SUCCEEDED, events[0].getStatus());
     
     // wait for reduce to start running
     app.waitForState(reduceTask, TaskState.RUNNING);
@@ -116,27 +116,27 @@ public class TestFetchFailure {
     
     //previous completion event now becomes obsolete
     Assert.assertEquals("Event status not correct",
-        TaskAttemptCompletionEventStatus.OBSOLETE, events[0].status);
+        TaskAttemptCompletionEventStatus.OBSOLETE, events[0].getStatus());
     
     events = job.getTaskAttemptCompletionEvents(0, 100);
     Assert.assertEquals("No of completion events not correct",
         4, events.length);
     Assert.assertEquals("Event map attempt id not correct",
-        mapAttempt1.getID(), events[0].attemptId);
+        mapAttempt1.getID(), events[0].getAttemptId());
     Assert.assertEquals("Event map attempt id not correct",
-        mapAttempt1.getID(), events[1].attemptId);
+        mapAttempt1.getID(), events[1].getAttemptId());
     Assert.assertEquals("Event map attempt id not correct",
-        mapAttempt2.getID(), events[2].attemptId);
+        mapAttempt2.getID(), events[2].getAttemptId());
     Assert.assertEquals("Event redude attempt id not correct",
-        reduceAttempt.getID(), events[3].attemptId);
+        reduceAttempt.getID(), events[3].getAttemptId());
     Assert.assertEquals("Event status not correct for map attempt1",
-        TaskAttemptCompletionEventStatus.OBSOLETE, events[0].status);
+        TaskAttemptCompletionEventStatus.OBSOLETE, events[0].getStatus());
     Assert.assertEquals("Event status not correct for map attempt1",
-        TaskAttemptCompletionEventStatus.FAILED, events[1].status);
+        TaskAttemptCompletionEventStatus.FAILED, events[1].getStatus());
     Assert.assertEquals("Event status not correct for map attempt2",
-        TaskAttemptCompletionEventStatus.SUCCEEDED, events[2].status);
+        TaskAttemptCompletionEventStatus.SUCCEEDED, events[2].getStatus());
     Assert.assertEquals("Event status not correct for reduce attempt1",
-        TaskAttemptCompletionEventStatus.SUCCEEDED, events[3].status);
+        TaskAttemptCompletionEventStatus.SUCCEEDED, events[3].getStatus());
   }
 
   private void sendFetchFailure(MRApp app, TaskAttempt reduceAttempt, 
@@ -144,6 +144,6 @@ public class TestFetchFailure {
     app.getContext().getEventHandler().handle(
         new JobTaskAttemptFetchFailureEvent(
             reduceAttempt.getID(), 
-            Arrays.asList(new TaskAttemptID[] {mapAttempt.getID()})));
+            Arrays.asList(new TaskAttemptId[] {mapAttempt.getID()})));
   }
 }

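Completion events are likewise read through record getters now (getStatus(), getAttemptId()) rather than the old public status/attemptId fields. A small illustrative helper, assuming only those getters, for scanning an event array of the kind the test above asserts over:

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEventStatus;
    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;

    public class CompletionEventSketch {
      // Collect the attempt ids of all events carrying the given status,
      // using the record getters this patch switches the test to.
      static List<TaskAttemptId> attemptsWithStatus(
          TaskAttemptCompletionEvent[] events,
          TaskAttemptCompletionEventStatus status) {
        List<TaskAttemptId> ids = new ArrayList<TaskAttemptId>();
        for (TaskAttemptCompletionEvent event : events) {
          if (event.getStatus() == status) {
            ids.add(event.getAttemptId());
          }
        }
        return ids;
      }
    }
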
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobHistoryParsing.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobHistoryParsing.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestJobHistoryParsing.java Thu Mar 31 22:23:22 2011
@@ -36,8 +36,8 @@ import org.apache.hadoop.mapreduce.jobhi
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
 import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.junit.Test;
@@ -53,7 +53,7 @@ public class TestJobHistoryParsing {
     MRApp app = new MRApp(2, 1, true);
     app.submit(conf);
     Job job = app.getContext().getAllJobs().values().iterator().next();
-    JobID jobId = job.getID();
+    JobId jobId = job.getID();
     LOG.info("JOBID is " + TypeConverter.fromYarn(jobId).toString());
     app.waitForState(job, JobState.SUCCEEDED);
     

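The history-parsing test keeps the bridge between the new JobId record and the classic JobID: TypeConverter.fromYarn. A one-line helper restating the conversion used in the LOG.info call above (the helper itself is illustrative, not part of the patch):

    import org.apache.hadoop.mapreduce.TypeConverter;
    import org.apache.hadoop.mapreduce.v2.api.records.JobId;

    public class IdConversionSketch {
      // Format a yarn-side JobId in the classic string form, exactly as
      // the test's log statement does.
      static String classicJobIdString(JobId jobId) {
        return TypeConverter.fromYarn(jobId).toString();
      }
    }
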
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestKill.java Thu Mar 31 22:23:22 2011
@@ -25,6 +25,11 @@ import java.util.concurrent.CountDownLat
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
@@ -34,11 +39,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
 import org.junit.Test;
 
 /**
@@ -68,19 +68,19 @@ public class TestKill {
 
     //wait and validate for Job to be KILLED
     app.waitForState(job, JobState.KILLED);
-    Map<TaskID,Task> tasks = job.getTasks();
+    Map<TaskId,Task> tasks = job.getTasks();
     Assert.assertEquals("No of tasks is not correct", 1, 
         tasks.size());
     Task task = tasks.values().iterator().next();
     Assert.assertEquals("Task state not correct", TaskState.KILLED, 
-        task.getReport().state);
-    Map<TaskAttemptID, TaskAttempt> attempts = 
+        task.getReport().getTaskState());
+    Map<TaskAttemptId, TaskAttempt> attempts = 
       tasks.values().iterator().next().getAttempts();
     Assert.assertEquals("No of attempts is not correct", 1, 
         attempts.size());
     Iterator<TaskAttempt> it = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.KILLED, 
-          it.next().getReport().state);
+          it.next().getReport().getTaskAttemptState());
   }
 
   @Test
@@ -92,7 +92,7 @@ public class TestKill {
     
     //wait and validate for Job to become RUNNING
     app.waitForState(job, JobState.RUNNING);
-    Map<TaskID,Task> tasks = job.getTasks();
+    Map<TaskId,Task> tasks = job.getTasks();
     Assert.assertEquals("No of tasks is not correct", 2, 
         tasks.size());
     Iterator<Task> it = tasks.values().iterator();
@@ -113,22 +113,22 @@ public class TestKill {
     //Job is succeeded
     
     Assert.assertEquals("Task state not correct", TaskState.KILLED, 
-        task1.getReport().state);
+        task1.getReport().getTaskState());
     Assert.assertEquals("Task state not correct", TaskState.SUCCEEDED, 
-        task2.getReport().state);
-    Map<TaskAttemptID, TaskAttempt> attempts = task1.getAttempts();
+        task2.getReport().getTaskState());
+    Map<TaskAttemptId, TaskAttempt> attempts = task1.getAttempts();
     Assert.assertEquals("No of attempts is not correct", 1, 
         attempts.size());
     Iterator<TaskAttempt> iter = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.KILLED, 
-          iter.next().getReport().state);
+          iter.next().getReport().getTaskAttemptState());
 
     attempts = task2.getAttempts();
     Assert.assertEquals("No of attempts is not correct", 1, 
         attempts.size());
     iter = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.SUCCEEDED, 
-          iter.next().getReport().state);
+          iter.next().getReport().getTaskAttemptState());
   }
 
   @Test
@@ -140,7 +140,7 @@ public class TestKill {
     
     //wait and validate for Job to become RUNNING
     app.waitForState(job, JobState.RUNNING);
-    Map<TaskID,Task> tasks = job.getTasks();
+    Map<TaskId,Task> tasks = job.getTasks();
     Assert.assertEquals("No of tasks is not correct", 2, 
         tasks.size());
     Iterator<Task> it = tasks.values().iterator();
@@ -166,25 +166,25 @@ public class TestKill {
     //first Task will have two attempts 1st is killed, 2nd Succeeds
     //both Tasks and Job succeeds
     Assert.assertEquals("Task state not correct", TaskState.SUCCEEDED, 
-        task1.getReport().state);
+        task1.getReport().getTaskState());
     Assert.assertEquals("Task state not correct", TaskState.SUCCEEDED, 
-        task2.getReport().state);
+        task2.getReport().getTaskState());
  
-    Map<TaskAttemptID, TaskAttempt> attempts = task1.getAttempts();
+    Map<TaskAttemptId, TaskAttempt> attempts = task1.getAttempts();
     Assert.assertEquals("No of attempts is not correct", 2, 
         attempts.size());
     Iterator<TaskAttempt> iter = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.KILLED, 
-          iter.next().getReport().state);
+          iter.next().getReport().getTaskAttemptState());
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.SUCCEEDED, 
-        iter.next().getReport().state);
+        iter.next().getReport().getTaskAttemptState());
     
     attempts = task2.getAttempts();
     Assert.assertEquals("No of attempts is not correct", 1, 
         attempts.size());
     iter = attempts.values().iterator();
     Assert.assertEquals("Attempt state not correct", TaskAttemptState.SUCCEEDED, 
-          iter.next().getReport().state);
+          iter.next().getReport().getTaskAttemptState());
   }
 
   static class BlockingMRApp extends MRApp {
@@ -195,8 +195,8 @@ public class TestKill {
     }
 
     @Override
-    protected void attemptLaunched(TaskAttemptID attemptID) {
-      if (attemptID.taskID.id == 0 && attemptID.id == 0) {
+    protected void attemptLaunched(TaskAttemptId attemptID) {
+      if (attemptID.getTaskId().getId() == 0 && attemptID.getId() == 0) {
         //this blocks the first task's first attempt
         //the subsequent ones are completed
         try {

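With ids as records, the MRApp hooks in TestKill compare id components through getters rather than nested public fields. The predicate in BlockingMRApp.attemptLaunched, pulled out as a hypothetical helper:

    import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;

    public class AttemptPredicateSketch {
      // True only for the first attempt of the first task, matching the
      // check BlockingMRApp uses above to decide which attempt to block.
      static boolean isFirstAttemptOfFirstTask(TaskAttemptId attemptID) {
        return attemptID.getTaskId().getId() == 0 && attemptID.getId() == 0;
      }
    }
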
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRApp.java Thu Mar 31 22:23:22 2011
@@ -24,6 +24,9 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.mapreduce.MRJobConfig;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -32,9 +35,6 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskEventType;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
 import org.junit.Test;
 
 /**
@@ -111,7 +111,7 @@ public class TestMRApp {
     
     // reduces must be in NEW state
     Assert.assertEquals("Reduce Task state not correct",
-        TaskState.NEW, reduceTask.getReport().state);
+        TaskState.NEW, reduceTask.getReport().getTaskState());
     
     //send the done signal to the 1st map task
     app.getContext().getEventHandler().handle(

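The TestMRClientService diff below shows the largest API shift in this commit: MRClientProtocol methods now take request records and return response records instead of positional arguments. A minimal sketch of that call pattern, using only the types and getters visible in the patch (the JobReport record type and the throws clause are assumptions, not confirmed by this diff):

    import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
    import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
    import org.apache.hadoop.mapreduce.v2.api.records.JobId;
    import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
    import org.apache.hadoop.yarn.factories.RecordFactory;
    import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;

    public class ProtocolCallSketch {
      private static final RecordFactory recordFactory =
          RecordFactoryProvider.getRecordFactory(null);

      // Build a request record, invoke the proxy, unwrap the response --
      // the three-step idiom the test below repeats for each RPC.
      static JobReport fetchJobReport(MRClientProtocol proxy, JobId jobId)
          throws Exception {
        GetJobReportRequest request =
            recordFactory.newRecordInstance(GetJobReportRequest.class);
        request.setJobId(jobId);
        return proxy.getJobReport(request).getJobReport();
      }
    }
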
Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRClientService.java Thu Mar 31 22:23:22 2011
@@ -24,6 +24,20 @@ import java.util.List;
 import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.Phase;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
 import org.apache.hadoop.mapreduce.v2.app.client.MRClientService;
@@ -35,18 +49,17 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.ipc.RPCUtil;
 import org.apache.hadoop.yarn.ipc.YarnRPC;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
-import org.apache.hadoop.mapreduce.v2.api.Phase;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
 import org.junit.Test;
 
 public class TestMRClientService {
 
+  private static RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+  
   @Test
   public void test() throws Exception {
     MRAppWithClientService app = new MRAppWithClientService(1, 0, false);
@@ -84,34 +97,58 @@ public class TestMRClientService {
     MRClientProtocol proxy =
       (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
           app.clientService.getBindAddress(), conf);
-    Assert.assertNotNull("Counters is null", proxy.getCounters(job.getID()));
-    Assert.assertNotNull("JobReport is null", proxy.getJobReport(job.getID()));
+    GetCountersRequest gcRequest = recordFactory.newRecordInstance(GetCountersRequest.class);    
+    gcRequest.setJobId(job.getID());
+    Assert.assertNotNull("Counters is null", proxy.getCounters(gcRequest).getCounters());
+    
+    GetJobReportRequest gjrRequest = recordFactory.newRecordInstance(GetJobReportRequest.class);
+    gjrRequest.setJobId(job.getID());
+    Assert.assertNotNull("JobReport is null", proxy.getJobReport(gjrRequest).getJobReport());
+    
+    GetTaskAttemptCompletionEventsRequest gtaceRequest = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsRequest.class);
+    gtaceRequest.setJobId(job.getID());
+    gtaceRequest.setFromEventId(0);
+    gtaceRequest.setMaxEvents(10);
     Assert.assertNotNull("TaskCompletionEvents is null", 
-        proxy.getTaskAttemptCompletionEvents(job.getID(), 0, 10));
+        proxy.getTaskAttemptCompletionEvents(gtaceRequest).getCompletionEventList());
+    
+    GetDiagnosticsRequest gdRequest = recordFactory.newRecordInstance(GetDiagnosticsRequest.class);
+    gdRequest.setTaskAttemptId(attempt.getID());
     Assert.assertNotNull("Diagnostics is null", 
-        proxy.getDiagnostics(attempt.getID()));
+        proxy.getDiagnostics(gdRequest).getDiagnosticsList());
+    
+    GetTaskAttemptReportRequest gtarRequest = recordFactory.newRecordInstance(GetTaskAttemptReportRequest.class);
+    gtarRequest.setTaskAttemptId(attempt.getID());
     Assert.assertNotNull("TaskAttemptReport is null", 
-        proxy.getTaskAttemptReport(attempt.getID()));
+        proxy.getTaskAttemptReport(gtarRequest).getTaskAttemptReport());
+    
+    GetTaskReportRequest gtrRequest = recordFactory.newRecordInstance(GetTaskReportRequest.class);
+    gtrRequest.setTaskId(task.getID());
     Assert.assertNotNull("TaskReport is null", 
-        proxy.getTaskReport(task.getID()));
+        proxy.getTaskReport(gtrRequest).getTaskReport());
     
+    GetTaskReportsRequest gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
+    gtreportsRequest.setJobId(job.getID());
+    gtreportsRequest.setTaskType(TaskType.MAP);
     Assert.assertNotNull("TaskReports for map is null", 
-        proxy.getTaskReports(job.getID(), 
-        TaskType.MAP));
+        proxy.getTaskReports(gtreportsRequest).getTaskReportList());
+    
+    gtreportsRequest = recordFactory.newRecordInstance(GetTaskReportsRequest.class);
+    gtreportsRequest.setJobId(job.getID());
+    gtreportsRequest.setTaskType(TaskType.REDUCE);
     Assert.assertNotNull("TaskReports for reduce is null", 
-        proxy.getTaskReports(job.getID(), 
-        TaskType.REDUCE));
+        proxy.getTaskReports(gtreportsRequest).getTaskReportList());
     
-    List<CharSequence> diag = proxy.getDiagnostics(attempt.getID());
+    List<String> diag = proxy.getDiagnostics(gdRequest).getDiagnosticsList();
     Assert.assertEquals("No of diagnostic not correct" , 2 , diag.size());
     Assert.assertEquals("Diag 1 not correct" , 
         diagnostic1, diag.get(0).toString());
     Assert.assertEquals("Diag 2 not correct" , 
         diagnostic2, diag.get(1).toString());
     
-    TaskReport taskReport = proxy.getTaskReport(task.getID());
+    TaskReport taskReport = proxy.getTaskReport(gtrRequest).getTaskReport();
     Assert.assertEquals("No of diagnostic not correct", 2, 
-        taskReport.diagnostics.size());
+        taskReport.getDiagnosticsCount());
     
     //send the done signal to the task
     app.getContext().getEventHandler().handle(