You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by ac...@apache.org on 2011/04/01 00:23:34 UTC

svn commit: r1087462 [5/20] - in /hadoop/mapreduce/branches/MR-279: ./ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/ mr-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/ mr-client/h...

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Thu Mar 31 22:23:22 2011
@@ -28,40 +28,48 @@ import java.util.Set;
 
 import junit.framework.Assert;
 
-import org.apache.avro.ipc.AvroRemoteException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
 import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerAssignedEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
 import org.apache.hadoop.mapreduce.v2.app.rm.RMContainerAllocator;
 import org.apache.hadoop.net.NetworkTopology;
+import org.apache.hadoop.yarn.api.AMRMProtocol;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterResponse;
+import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
+import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
+import org.apache.hadoop.yarn.api.records.AMResponse;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ApplicationMaster;
+import org.apache.hadoop.yarn.api.records.ApplicationStatus;
+import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.api.records.ResourceRequest;
 import org.apache.hadoop.yarn.event.Event;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
+import org.apache.hadoop.yarn.server.api.records.NodeId;
 import org.apache.hadoop.yarn.server.resourcemanager.resourcetracker.NodeInfo;
 import org.apache.hadoop.yarn.server.resourcemanager.resourcetracker.RMResourceTrackerImpl;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
 import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
 import org.apache.hadoop.yarn.server.security.ContainerTokenSecretManager;
-import org.apache.hadoop.yarn.AMRMProtocol;
-import org.apache.hadoop.yarn.AMResponse;
-import org.apache.hadoop.yarn.ApplicationID;
-import org.apache.hadoop.yarn.ApplicationMaster;
-import org.apache.hadoop.yarn.ApplicationStatus;
-import org.apache.hadoop.yarn.Container;
-import org.apache.hadoop.yarn.ContainerID;
-import org.apache.hadoop.yarn.NodeID;
-import org.apache.hadoop.yarn.Resource;
-import org.apache.hadoop.yarn.ResourceRequest;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
 import org.junit.Test;
 
 public class TestRMContainerAllocator {
   private static final Log LOG = LogFactory.getLog(TestRMContainerAllocator.class);
+  private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 
   @Test
   public void testSimple() throws Exception {
@@ -194,32 +202,33 @@ public class TestRMContainerAllocator {
 
   private NodeInfo addNode(FifoScheduler scheduler, 
       String nodeName, int memory) {
-    NodeID nodeId = new NodeID();
-    nodeId.id = 0;
-    Resource resource = new Resource();
-    resource.memory = memory;
+    NodeId nodeId = recordFactory.newRecordInstance(NodeId.class);
+    nodeId.setId(0);
+    Resource resource = recordFactory.newRecordInstance(Resource.class);
+    resource.setMemory(memory);
     NodeInfo nodeManager = scheduler.addNode(nodeId, nodeName,
         RMResourceTrackerImpl.resolve(nodeName), resource); // Node registration
     return nodeManager;
   }
 
-  private FifoScheduler createScheduler() throws AvroRemoteException {
+  private FifoScheduler createScheduler() throws YarnRemoteException {
     FifoScheduler fsc = new FifoScheduler(new Configuration(),
         new ContainerTokenSecretManager()) {
       //override this to copy the objects
       //otherwise FifoScheduler updates the numContainers in same objects as kept by
       //RMContainerAllocator
+      
       @Override
-      public synchronized List<Container> allocate(ApplicationID applicationId,
+      public synchronized List<Container> allocate(ApplicationId applicationId,
           List<ResourceRequest> ask, List<Container> release) 
           throws IOException {
         List<ResourceRequest> askCopy = new ArrayList<ResourceRequest>();
         for (ResourceRequest req : ask) {
-          ResourceRequest reqCopy = new ResourceRequest();
-          reqCopy.priority = req.priority;
-          reqCopy.hostName = req.hostName;
-          reqCopy.capability = req.capability;
-          reqCopy.numContainers = req.numContainers;
+          ResourceRequest reqCopy = recordFactory.newRecordInstance(ResourceRequest.class);
+          reqCopy.setPriority(req.getPriority());
+          reqCopy.setHostName(req.getHostName());
+          reqCopy.setCapability(req.getCapability());
+          reqCopy.setNumContainers(req.getNumContainers());
           askCopy.add(reqCopy);
         }
         //no need to copy release
@@ -227,7 +236,7 @@ public class TestRMContainerAllocator {
       }
     };
     try {
-      fsc.addApplication(new ApplicationID(), "test", null, null);
+      fsc.addApplication(recordFactory.newRecordInstance(ApplicationId.class), "test", null, null);
     } catch(IOException ie) {
       LOG.info("add application failed with ", ie);
       assert(false);
@@ -237,10 +246,10 @@ public class TestRMContainerAllocator {
 
   private ContainerRequestEvent createReq(
       int attemptid, int memory, int priority, String[] hosts) {
-    TaskAttemptID attemptId = new TaskAttemptID();
-    attemptId.id = attemptid;
-    Resource containerNeed = new Resource();
-    containerNeed.memory = memory;
+    TaskAttemptId attemptId = recordFactory.newRecordInstance(TaskAttemptId.class);
+    attemptId.setId(attemptid);
+    Resource containerNeed = recordFactory.newRecordInstance(Resource.class);
+    containerNeed.setMemory(memory);
     return new ContainerRequestEvent(attemptId, 
         containerNeed, priority,
         hosts, new String[] {NetworkTopology.DEFAULT_RACK});
@@ -254,7 +263,7 @@ public class TestRMContainerAllocator {
         requests.length, assignments.size());
 
     //check for uniqueness of containerIDs
-    Set<ContainerID> containerIds = new HashSet<ContainerID>();
+    Set<ContainerId> containerIds = new HashSet<ContainerId>();
     for (TaskAttemptContainerAssignedEvent assigned : assignments) {
       containerIds.add(assigned.getContainerID());
     }
@@ -302,29 +311,32 @@ public class TestRMContainerAllocator {
       }
 
       @Override
-      public Void registerApplicationMaster(
-          ApplicationMaster applicationMaster) throws AvroRemoteException {
+      public RegisterApplicationMasterResponse registerApplicationMaster(RegisterApplicationMasterRequest request) throws YarnRemoteException {
+        ApplicationMaster applicationMaster = request.getApplicationMaster();
+        RegisterApplicationMasterResponse response = recordFactory.newRecordInstance(RegisterApplicationMasterResponse.class);
         return null;
       }
 
-      @Override
-      public AMResponse allocate(ApplicationStatus status,
-          List<ResourceRequest> ask, List<Container> release)
-          throws AvroRemoteException {
+      public AllocateResponse allocate(AllocateRequest request) throws YarnRemoteException {
+        ApplicationStatus status = request.getApplicationStatus();
+        List<ResourceRequest> ask = request.getAskList();
+        List<Container> release = request.getReleaseList();
         try {
-          AMResponse response = new AMResponse();
-          response.containers = resourceScheduler.allocate(status.applicationId, ask, release);
-          return response;
+          AMResponse response = recordFactory.newRecordInstance(AMResponse.class);
+          response.addAllContainers(resourceScheduler.allocate(status.getApplicationId(), ask, release));
+          AllocateResponse allocateResponse = recordFactory.newRecordInstance(AllocateResponse.class);
+          allocateResponse.setAMResponse(response);
+          return allocateResponse;
         } catch(IOException ie) {
           throw RPCUtil.getRemoteException(ie);
         }
       }
 
       @Override
-      public Void finishApplicationMaster(ApplicationMaster applicationMaster)
-      throws AvroRemoteException {
-        // TODO Auto-generated method stub
-        return null;
+      public FinishApplicationMasterResponse finishApplicationMaster(FinishApplicationMasterRequest request) throws YarnRemoteException {
+        ApplicationMaster applicationMaster = request.getApplicationMaster();
+        FinishApplicationMasterResponse response = recordFactory.newRecordInstance(FinishApplicationMasterResponse.class);
+        return response;
       }
 
     }
@@ -380,12 +392,12 @@ public class TestRMContainerAllocator {
         this.events = events;
       }
       @Override
-      public Map<JobID, Job> getAllJobs() {
+      public Map<JobId, Job> getAllJobs() {
         return null;
       }
       @Override
-      public ApplicationID getApplicationID() {
-        return new ApplicationID();
+      public ApplicationId getApplicationID() {
+        return recordFactory.newRecordInstance(ApplicationId.class);
       }
       @Override
       public EventHandler getEventHandler() {
@@ -397,12 +409,12 @@ public class TestRMContainerAllocator {
         };
       }
       @Override
-      public Job getJob(JobID jobID) {
+      public Job getJob(JobId jobID) {
         return null;
       }
 
       @Override
-      public CharSequence getUser() {
+      public String getUser() {
         return null;
       }
     }

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java Thu Mar 31 22:23:22 2011
@@ -32,6 +32,18 @@ import java.util.concurrent.ConcurrentLi
 import java.util.concurrent.atomic.AtomicInteger;
 import java.util.concurrent.atomic.AtomicLong;
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.Clock;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -46,24 +58,14 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.app.speculate.Speculator;
 import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
 import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.event.AsyncDispatcher;
 import org.apache.hadoop.yarn.event.Dispatcher;
 import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.service.CompositeService;
-import org.apache.hadoop.yarn.ApplicationID;
-import org.apache.hadoop.yarn.ContainerID;
-import org.apache.hadoop.mapreduce.v2.api.Counters;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskReport;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -102,6 +104,8 @@ public class TestRuntimeEstimators {
       = new AtomicInteger(0);
   private final AtomicLong taskTimeSavedBySpeculation
       = new AtomicLong(0L);
+  
+  private final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
 
   private void coreTestEstimator
       (TaskRuntimeEstimator testedEstimator, int expectedSpeculations) {
@@ -255,7 +259,7 @@ public class TestRuntimeEstimators {
 
     @Override
     public void handle(TaskEvent event) {
-      TaskID taskID = event.getTaskID();
+      TaskId taskID = event.getTaskID();
       Task task = myJob.getTask(taskID);
 
       Assert.assertEquals
@@ -274,21 +278,21 @@ public class TestRuntimeEstimators {
   }
 
   class MyTaskImpl implements Task {
-    private final TaskID taskID;
-    private final Map<TaskAttemptID, TaskAttempt> attempts
-        = new HashMap<TaskAttemptID, TaskAttempt>(4);
-
-    MyTaskImpl(JobID jobID, int index, TaskType type) {
-      taskID = new TaskID();
-      taskID.id = index;
-      taskID.taskType = type;
-      taskID.jobID = jobID;
+    private final TaskId taskID;
+    private final Map<TaskAttemptId, TaskAttempt> attempts
+        = new HashMap<TaskAttemptId, TaskAttempt>(4);
+
+    MyTaskImpl(JobId jobID, int index, TaskType type) {
+      taskID = recordFactory.newRecordInstance(TaskId.class);
+      taskID.setId(index);
+      taskID.setTaskType(type);
+      taskID.setJobId(jobID);
     }
 
     void addAttempt() {
       TaskAttempt taskAttempt
           = new MyTaskAttemptImpl(taskID, attempts.size(), clock);
-      TaskAttemptID taskAttemptID = taskAttempt.getID();
+      TaskAttemptId taskAttemptID = taskAttempt.getID();
 
       attempts.put(taskAttemptID, taskAttempt);
 
@@ -299,7 +303,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public TaskID getID() {
+    public TaskId getID() {
       return taskID;
     }
 
@@ -327,19 +331,19 @@ public class TestRuntimeEstimators {
 
     @Override
     public TaskType getType() {
-      return taskID.taskType;
+      return taskID.getTaskType();
     }
 
     @Override
-    public Map<TaskAttemptID, TaskAttempt> getAttempts() {
-      Map<TaskAttemptID, TaskAttempt> result
-          = new HashMap<TaskAttemptID, TaskAttempt>(attempts.size());
+    public Map<TaskAttemptId, TaskAttempt> getAttempts() {
+      Map<TaskAttemptId, TaskAttempt> result
+          = new HashMap<TaskAttemptId, TaskAttempt>(attempts.size());
       result.putAll(attempts);
       return result;
     }
 
     @Override
-    public TaskAttempt getAttempt(TaskAttemptID attemptID) {
+    public TaskAttempt getAttempt(TaskAttemptId attemptID) {
       return attempts.get(attemptID);
     }
 
@@ -355,7 +359,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public boolean canCommit(TaskAttemptID taskAttemptID) {
+    public boolean canCommit(TaskAttemptId taskAttemptID) {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -367,12 +371,12 @@ public class TestRuntimeEstimators {
   }
 
   class MyJobImpl implements Job {
-    private final JobID jobID;
-    private final Map<TaskID, Task> allTasks = new HashMap<TaskID, Task>();
-    private final Map<TaskID, Task> mapTasks = new HashMap<TaskID, Task>();
-    private final Map<TaskID, Task> reduceTasks = new HashMap<TaskID, Task>();
+    private final JobId jobID;
+    private final Map<TaskId, Task> allTasks = new HashMap<TaskId, Task>();
+    private final Map<TaskId, Task> mapTasks = new HashMap<TaskId, Task>();
+    private final Map<TaskId, Task> reduceTasks = new HashMap<TaskId, Task>();
 
-    MyJobImpl(JobID jobID, int numMaps, int numReduces) {
+    MyJobImpl(JobId jobID, int numMaps, int numReduces) {
       this.jobID = jobID;
       for (int i = 0; i < numMaps; ++i) {
         Task newTask = new MyTaskImpl(jobID, i, TaskType.MAP);
@@ -393,7 +397,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public JobID getID() {
+    public JobId getID() {
       return jobID;
     }
 
@@ -413,17 +417,17 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public Map<TaskID, Task> getTasks() {
+    public Map<TaskId, Task> getTasks() {
       return allTasks;
     }
 
     @Override
-    public Map<TaskID, Task> getTasks(TaskType taskType) {
+    public Map<TaskId, Task> getTasks(TaskType taskType) {
       return taskType == TaskType.MAP ? mapTasks : reduceTasks;
     }
 
     @Override
-    public Task getTask(TaskID taskID) {
+    public Task getTask(TaskId taskID) {
       return allTasks.get(taskID);
     }
 
@@ -449,7 +453,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public CharSequence getName() {
+    public String getName() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -470,7 +474,7 @@ public class TestRuntimeEstimators {
    * NEW state.  Attempts transition only from NEW to RUNNING to SUCCEEDED .
    */
   class MyTaskAttemptImpl implements TaskAttempt {
-    private final TaskAttemptID myAttemptID;
+    private final TaskAttemptId myAttemptID;
 
     long startMockTime = Long.MIN_VALUE;
 
@@ -478,26 +482,26 @@ public class TestRuntimeEstimators {
 
     TaskAttemptState overridingState = TaskAttemptState.NEW;
 
-    MyTaskAttemptImpl(TaskID taskID, int index, Clock clock) {
-      myAttemptID = new TaskAttemptID();
-      myAttemptID.id = index;
-      myAttemptID.taskID = taskID;
+    MyTaskAttemptImpl(TaskId taskID, int index, Clock clock) {
+      myAttemptID = recordFactory.newRecordInstance(TaskAttemptId.class);
+      myAttemptID.setId(index);
+      myAttemptID.setTaskId(taskID);
     }
 
     void startUp() {
       startMockTime = clock.getTime();
       overridingState = null;
 
-      slotsInUse.addAndGet(taskTypeSlots(myAttemptID.taskID.taskType));
+      slotsInUse.addAndGet(taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
 
       System.out.println("TLTRE.MyTaskAttemptImpl.startUp starting " + getID());
 
-      SpeculatorEvent event = new SpeculatorEvent(getID().taskID, -1);
+      SpeculatorEvent event = new SpeculatorEvent(getID().getTaskId(), -1);
       dispatcher.getEventHandler().handle(event);
     }
 
     @Override
-    public TaskAttemptID getID() {
+    public TaskAttemptId getID() {
       return myAttemptID;
     }
 
@@ -507,7 +511,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public List<CharSequence> getDiagnostics() {
+    public List<String> getDiagnostics() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -517,8 +521,8 @@ public class TestRuntimeEstimators {
     }
 
     private float getCodeRuntime() {
-      int taskIndex = myAttemptID.taskID.id;
-      int attemptIndex = myAttemptID.id;
+      int taskIndex = myAttemptID.getTaskId().getId();
+      int attemptIndex = myAttemptID.getId();
 
       float result = 200.0F;
 
@@ -553,7 +557,7 @@ public class TestRuntimeEstimators {
     }
 
     private float getReduceProgress() {
-      Job job = myAppContext.getJob(myAttemptID.taskID.jobID);
+      Job job = myAppContext.getJob(myAttemptID.getTaskId().getJobId());
       float runtime = getCodeRuntime();
 
       Collection<Task> allMapTasks = job.getTasks(TaskType.MAP).values();
@@ -585,7 +589,7 @@ public class TestRuntimeEstimators {
       if (overridingState == TaskAttemptState.NEW) {
         return 0.0F;
       }
-      return myAttemptID.taskID.taskType == TaskType.MAP ? getMapProgress() : getReduceProgress();
+      return myAttemptID.getTaskId().getTaskType() == TaskType.MAP ? getMapProgress() : getReduceProgress();
     }
 
     @Override
@@ -601,13 +605,13 @@ public class TestRuntimeEstimators {
 
         System.out.println("MyTaskAttemptImpl.getState() -- attempt " + myAttemptID + " finished.");
 
-        slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.taskID.taskType));
+        slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
 
-        (myAttemptID.taskID.taskType == TaskType.MAP
+        (myAttemptID.getTaskId().getTaskType() == TaskType.MAP
             ? completedMaps : completedReduces).getAndIncrement();
 
         // check for a spectacularly successful speculation
-        TaskID taskID = myAttemptID.taskID;
+        TaskId taskID = myAttemptID.getTaskId();
         Task undoneTask = null;
 
         Task task = myJob.getTask(taskID);
@@ -617,7 +621,7 @@ public class TestRuntimeEstimators {
               && otherAttempt.getState() == TaskAttemptState.RUNNING) {
             // we had two instances running.  Try to determine how much
             //  we might have saved by speculation
-            if (getID().id > otherAttempt.getID().id) {
+            if (getID().getId() > otherAttempt.getID().getId()) {
               // the speculation won
               successfulSpeculations.getAndIncrement();
               float hisProgress = otherAttempt.getProgress();
@@ -638,7 +642,7 @@ public class TestRuntimeEstimators {
               long estimatedSavings = originalTaskEndEstimate - clock.getTime();
               taskTimeSavedBySpeculation.addAndGet(estimatedSavings);
               System.out.println("TLTRE: The task is " + task.getID());
-              slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.taskID.taskType));
+              slotsInUse.addAndGet(- taskTypeSlots(myAttemptID.getTaskId().getTaskType()));
               ((MyTaskAttemptImpl)otherAttempt).overridingState
                   = TaskAttemptState.KILLED;
             } else {
@@ -659,7 +663,7 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public ContainerID getAssignedContainerID() {
+    public ContainerId getAssignedContainerID() {
       throw new UnsupportedOperationException("Not supported yet.");
     }
 
@@ -711,17 +715,17 @@ public class TestRuntimeEstimators {
   class MyAppContext implements AppContext {
     // I'll be making Avro objects by hand.  Please don't do that very often.
 
-    private final ApplicationID myApplicationID;
-    private final JobID myJobID;
-    private final Map<JobID, Job> allJobs;
+    private final ApplicationId myApplicationID;
+    private final JobId myJobID;
+    private final Map<JobId, Job> allJobs;
 
     MyAppContext(int numberMaps, int numberReduces) {
-      myApplicationID = new ApplicationID();
-      myApplicationID.clusterTimeStamp = clock.getTime();
-      myApplicationID.id = 1;
+      myApplicationID = recordFactory.newRecordInstance(ApplicationId.class);
+      myApplicationID.setClusterTimestamp(clock.getTime());
+      myApplicationID.setId(1);
 
-      myJobID = new JobID();
-      myJobID.appID = myApplicationID;
+      myJobID = recordFactory.newRecordInstance(JobId.class);
+      myJobID.setAppId(myApplicationID);
 
       Job myJob
           = new MyJobImpl(myJobID, numberMaps, numberReduces);
@@ -730,17 +734,17 @@ public class TestRuntimeEstimators {
     }
 
     @Override
-    public ApplicationID getApplicationID() {
+    public ApplicationId getApplicationID() {
       return myApplicationID;
     }
 
     @Override
-    public Job getJob(JobID jobID) {
+    public Job getJob(JobId jobID) {
       return allJobs.get(jobID);
     }
 
     @Override
-    public Map<JobID, Job> getAllJobs() {
+    public Map<JobId, Job> getAllJobs() {
       return allJobs;
     }
 

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java Thu Mar 31 22:23:22 2011
@@ -18,36 +18,30 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import com.google.inject.Injector;
-import java.util.Map;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.APP_ID;
+import static org.junit.Assert.assertEquals;
 
-import org.junit.Test;
-import static org.junit.Assert.*;
+import java.util.Map;
 
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.MockJobs;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp;
-import org.apache.hadoop.mapreduce.v2.app.webapp.AppController;
-import org.apache.hadoop.mapreduce.v2.app.webapp.AppView;
-import org.apache.hadoop.mapreduce.v2.app.webapp.JobPage;
-import org.apache.hadoop.mapreduce.v2.app.webapp.TaskPage;
-import org.apache.hadoop.mapreduce.v2.app.webapp.TasksPage;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.event.EventHandler;
 import org.apache.hadoop.yarn.util.Apps;
 import org.apache.hadoop.yarn.webapp.WebApps;
 import org.apache.hadoop.yarn.webapp.test.WebAppTests;
-import org.apache.hadoop.yarn.ApplicationID;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
+import org.junit.Test;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
+import com.google.inject.Injector;
 
 public class TestAMWebApp {
 
   static class TestAppContext implements AppContext {
-    final ApplicationID appID;
+    final ApplicationId appID;
     final String user = MockJobs.newUserName();
-    final Map<JobID, Job> jobs;
+    final Map<JobId, Job> jobs;
 
     TestAppContext(int appid, int numJobs, int numTasks, int numAttempts) {
       appID = MockJobs.newAppID(appid);
@@ -59,7 +53,7 @@ public class TestAMWebApp {
     }
 
     @Override
-    public ApplicationID getApplicationID() {
+    public ApplicationId getApplicationID() {
       return appID;
     }
 
@@ -69,12 +63,12 @@ public class TestAMWebApp {
     }
 
     @Override
-    public Job getJob(JobID jobID) {
+    public Job getJob(JobId jobID) {
       return jobs.get(jobID);
     }
 
     @Override
-    public Map<JobID, Job> getAllJobs() {
+    public Map<JobId, Job> getAllJobs() {
       return jobs; // OK
     }
 

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/pom.xml?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/pom.xml (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/pom.xml Thu Mar 31 22:23:22 2011
@@ -1,4 +1,5 @@
-<?xml version="1.0"?><project>
+<?xml version="1.0"?>
+<project>
   <parent>
     <artifactId>hadoop-mapreduce-client</artifactId>
     <groupId>org.apache.hadoop</groupId>
@@ -41,11 +42,16 @@
       <artifactId>hadoop-mapreduce-client-core</artifactId>
       <version>${yarn.version}</version>
     </dependency>
+    <dependency>
+      <groupId>com.google.protobuf</groupId>
+      <artifactId>protobuf-java</artifactId>
+      <version>2.4.0a</version>
+    </dependency>
   </dependencies>
 
   <build>
     <plugins>
-      <plugin>
+      <!--plugin>
         <groupId>org.apache.avro</groupId>
         <artifactId>avro-maven-plugin</artifactId>
         <version>1.4.0-SNAPSHOT</version>
@@ -57,7 +63,78 @@
             </goals>
           </execution>
         </executions>
+      </plugin-->
+
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-antrun-plugin</artifactId>
+        <executions>
+          <execution>
+            <id>create_generate_src_dirctory</id>
+            <phase>initialize</phase>
+            <configuration>
+              <tasks>
+                <mkdir dir="target/generated-sources/proto" />
+              </tasks>
+            </configuration>
+            <goals>
+              <goal>run</goal>
+            </goals>
+          </execution>
+        </executions>
       </plugin>
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>exec-maven-plugin</artifactId>
+        <version>1.2</version>
+        <executions>
+          <execution>
+            <id>generate-sources</id>
+            <phase>generate-sources</phase>
+            <configuration>
+              <executable>protoc</executable>
+              <arguments>
+                <argument>-I../../yarn/yarn-api/src/main/proto/</argument>
+                <argument>-Isrc/main/proto/</argument>
+                <argument>--java_out=target/generated-sources/proto</argument>
+                <argument>src/main/proto/mr_protos.proto</argument>
+                <argument>src/main/proto/mr_service_protos.proto</argument>
+                <argument>src/main/proto/MRClientProtocol.proto</argument>
+              </arguments>
+            </configuration>
+            <goals>
+              <goal>exec</goal>
+            </goals>
+          </execution>
+        </executions>
+      </plugin>
+
+      <plugin>
+        <groupId>org.codehaus.mojo</groupId>
+        <artifactId>build-helper-maven-plugin</artifactId>
+        <version>1.5</version>
+        <executions>
+          <execution>
+            <id>add-source</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>add-source</goal>
+            </goals>
+            <configuration>
+              <sources>
+                <source>target/generated-sources/proto</source>
+              </sources>
+            </configuration>
+          </execution>
+        </executions>
+      </plugin>
+
+
+
+
+
+
     </plugins>
   </build>
 </project>

Modified: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1087462&r1=1087461&r2=1087462&view=diff
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Thu Mar 31 22:23:22 2011
@@ -23,42 +23,48 @@ import java.util.HashMap;
 import java.util.List;
 
 import org.apache.hadoop.mapred.JobPriority;
+import org.apache.hadoop.mapred.TIPStatus;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
-import org.apache.hadoop.mapreduce.v2.api.Counter;
-import org.apache.hadoop.mapreduce.v2.api.CounterGroup;
-import org.apache.hadoop.mapreduce.v2.api.Counters;
-import org.apache.hadoop.mapreduce.v2.api.JobID;
-import org.apache.hadoop.mapreduce.v2.api.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.JobState;
-import org.apache.hadoop.mapreduce.v2.api.Phase;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEventStatus;
-import org.apache.hadoop.mapreduce.v2.api.TaskAttemptID;
-import org.apache.hadoop.mapreduce.v2.api.TaskID;
-import org.apache.hadoop.mapreduce.v2.api.TaskState;
-import org.apache.hadoop.mapreduce.v2.api.TaskType;
-import org.apache.hadoop.yarn.ApplicationID;
+import org.apache.hadoop.mapreduce.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.mapreduce.v2.api.records.Phase;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEventStatus;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 
 public class TypeConverter {
 
-  public static org.apache.hadoop.mapred.JobID fromYarn(JobID id) {
-    String identifier = fromClusterTimeStamp(id.appID.clusterTimeStamp);
-    return new org.apache.hadoop.mapred.JobID(identifier, id.id);
+  public static org.apache.hadoop.mapred.JobID fromYarn(JobId id) {
+    String identifier = fromClusterTimeStamp(id.getAppId().getClusterTimestamp());
+    return new org.apache.hadoop.mapred.JobID(identifier, id.getId());
   }
 
   //currently there is 1-1 mapping between appid and jobid
-  public static org.apache.hadoop.mapreduce.JobID fromYarn(ApplicationID appID) {
-    String identifier = fromClusterTimeStamp(appID.clusterTimeStamp);
-    return new org.apache.hadoop.mapred.JobID(identifier, appID.id);
+  public static org.apache.hadoop.mapreduce.JobID fromYarn(ApplicationId appID) {
+    String identifier = fromClusterTimeStamp(appID.getClusterTimestamp());
+    return new org.apache.hadoop.mapred.JobID(identifier, appID.getId());
   }
 
-  public static JobID toYarn(org.apache.hadoop.mapreduce.JobID id) {
-    JobID jobID = new JobID();
-    jobID.id = id.getId(); //currently there is 1-1 mapping between appid and jobid
-    jobID.appID = new ApplicationID();
-    jobID.appID.id = id.getId();
-    jobID.appID.clusterTimeStamp = toClusterTimeStamp(id.getJtIdentifier());
-    return jobID;
+  public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
+    JobId jobId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
+    jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
+    
+    ApplicationId appId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(ApplicationId.class);
+    appId.setId(id.getId());
+    appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
+    jobId.setAppId(appId);
+    return jobId;
   }
 
   private static String fromClusterTimeStamp(long clusterTimeStamp) {
@@ -93,17 +99,17 @@ public class TypeConverter {
     }
   }
 
-  public static org.apache.hadoop.mapred.TaskID fromYarn(TaskID id) {
-    return new org.apache.hadoop.mapred.TaskID(fromYarn(id.jobID), fromYarn(id.taskType),
-        id.id);
+  public static org.apache.hadoop.mapred.TaskID fromYarn(TaskId id) {
+    return new org.apache.hadoop.mapred.TaskID(fromYarn(id.getJobId()), fromYarn(id.getTaskType()),
+        id.getId());
   }
 
-  public static TaskID toYarn(org.apache.hadoop.mapreduce.TaskID id) {
-    TaskID taskID = new TaskID();
-    taskID.id = id.getId();
-    taskID.taskType = toYarn(id.getTaskType());
-    taskID.jobID = toYarn(id.getJobID());
-    return taskID;
+  public static TaskId toYarn(org.apache.hadoop.mapreduce.TaskID id) {
+    TaskId taskId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
+    taskId.setId(id.getId());
+    taskId.setTaskType(toYarn(id.getTaskType()));
+    taskId.setJobId(toYarn(id.getJobID()));
+    return taskId;
   }
 
   public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) {
@@ -125,11 +131,11 @@ public class TypeConverter {
   }
 
   public static TaskCompletionEvent[] fromYarn(
-      org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent[] newEvents) {
+      TaskAttemptCompletionEvent[] newEvents) {
     TaskCompletionEvent[] oldEvents =
         new TaskCompletionEvent[newEvents.length];
     int i = 0;
-    for (org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent newEvent 
+    for (TaskAttemptCompletionEvent newEvent 
         : newEvents) {
       oldEvents[i++] = fromYarn(newEvent);
     }
@@ -137,12 +143,12 @@ public class TypeConverter {
   }
 
   public static TaskCompletionEvent fromYarn(
-      org.apache.hadoop.mapreduce.v2.api.TaskAttemptCompletionEvent newEvent) {
-    return new TaskCompletionEvent(newEvent.eventId,
-              fromYarn(newEvent.attemptId), newEvent.attemptId.id,
-              newEvent.attemptId.taskID.taskType.equals(TaskType.MAP),
-              fromYarn(newEvent.status),
-              newEvent.mapOutputServerAddress.toString());
+      TaskAttemptCompletionEvent newEvent) {
+    return new TaskCompletionEvent(newEvent.getEventId(),
+              fromYarn(newEvent.getAttemptId()), newEvent.getAttemptId().getId(),
+              newEvent.getAttemptId().getTaskId().getTaskType().equals(TaskType.MAP),
+              fromYarn(newEvent.getStatus()),
+              newEvent.getMapOutputServerAddress());
   }
 
   public static TaskCompletionEvent.Status fromYarn(
@@ -163,78 +169,78 @@ public class TypeConverter {
   }
 
   public static org.apache.hadoop.mapred.TaskAttemptID fromYarn(
-      TaskAttemptID id) {
-    return new org.apache.hadoop.mapred.TaskAttemptID(fromYarn(id.taskID),
-        id.id);
+      TaskAttemptId id) {
+    return new org.apache.hadoop.mapred.TaskAttemptID(fromYarn(id.getTaskId()),
+        id.getId());
   }
 
-  public static TaskAttemptID toYarn(
+  public static TaskAttemptId toYarn(
       org.apache.hadoop.mapred.TaskAttemptID id) {
-    TaskAttemptID taskAttemptID = new TaskAttemptID();
-    taskAttemptID.taskID = toYarn(id.getTaskID());
-    taskAttemptID.id = id.getId();
-    return taskAttemptID;
+    TaskAttemptId taskAttemptId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
+    taskAttemptId.setTaskId(toYarn(id.getTaskID()));
+    taskAttemptId.setId(id.getId());
+    return taskAttemptId;
   }
 
-  public static TaskAttemptID toYarn(
+  public static TaskAttemptId toYarn(
       org.apache.hadoop.mapreduce.TaskAttemptID id) {
-    TaskAttemptID taskAttemptID = new TaskAttemptID();
-    taskAttemptID.taskID = toYarn(id.getTaskID());
-    taskAttemptID.id = id.getId();
-    return taskAttemptID;
+    TaskAttemptId taskAttemptId = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
+    taskAttemptId.setTaskId(toYarn(id.getTaskID()));
+    taskAttemptId.setId(id.getId());
+    return taskAttemptId;
   }
   
   public static org.apache.hadoop.mapreduce.Counters fromYarn(
       Counters yCntrs) {
     org.apache.hadoop.mapreduce.Counters counters = 
       new org.apache.hadoop.mapreduce.Counters();
-    for (CounterGroup yGrp : yCntrs.groups.values()) {
-      for (Counter yCntr : yGrp.counters.values()) {
+    for (CounterGroup yGrp : yCntrs.getAllCounterGroups().values()) {
+      for (Counter yCntr : yGrp.getAllCounters().values()) {
         org.apache.hadoop.mapreduce.Counter c = 
-          counters.findCounter(yGrp.displayname.toString(), 
-              yCntr.displayName.toString());
-        c.setValue(yCntr.value);
+          counters.findCounter(yGrp.getDisplayName(), 
+              yCntr.getDisplayName());
+        c.setValue(yCntr.getValue());
       }
     }
     return counters;
   }
 
   public static Counters toYarn(org.apache.hadoop.mapred.Counters counters) {
-    Counters yCntrs = new Counters();
-    yCntrs.groups = new HashMap<CharSequence, CounterGroup>();
+    Counters yCntrs = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counters.class);
+    yCntrs.addAllCounterGroups(new HashMap<String, CounterGroup>());
     for (org.apache.hadoop.mapred.Counters.Group grp : counters) {
-      CounterGroup yGrp = new CounterGroup();
-      yGrp.name = grp.getName();
-      yGrp.displayname = grp.getDisplayName();
-      yGrp.counters = new HashMap<CharSequence, Counter>();
+      CounterGroup yGrp = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(CounterGroup.class);
+      yGrp.setName(grp.getName());
+      yGrp.setDisplayName(grp.getDisplayName());
+      yGrp.addAllCounters(new HashMap<String, Counter>());
       for (org.apache.hadoop.mapred.Counters.Counter cntr : grp) {
-        Counter yCntr = new Counter();
-        yCntr.name = cntr.getName();
-        yCntr.displayName = cntr.getDisplayName();
-        yCntr.value = cntr.getValue();
-        yGrp.counters.put(yCntr.name, yCntr);
+        Counter yCntr = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counter.class);
+        yCntr.setName(cntr.getName());
+        yCntr.setDisplayName(cntr.getDisplayName());
+        yCntr.setValue(cntr.getValue());
+        yGrp.setCounter(yCntr.getName(), yCntr);
       }
-      yCntrs.groups.put(yGrp.name, yGrp);
+      yCntrs.setCounterGroup(yGrp.getName(), yGrp);
     }
     return yCntrs;
   }
 
   public static Counters toYarn(org.apache.hadoop.mapreduce.Counters counters) {
-    Counters yCntrs = new Counters();
-    yCntrs.groups = new HashMap<CharSequence, CounterGroup>();
+    Counters yCntrs = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counters.class);
+    yCntrs.addAllCounterGroups(new HashMap<String, CounterGroup>());
     for (org.apache.hadoop.mapreduce.CounterGroup grp : counters) {
-      CounterGroup yGrp = new CounterGroup();
-      yGrp.name = grp.getName();
-      yGrp.displayname = grp.getDisplayName();
-      yGrp.counters = new HashMap<CharSequence, Counter>();
+      CounterGroup yGrp = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(CounterGroup.class);
+      yGrp.setName(grp.getName());
+      yGrp.setDisplayName(grp.getDisplayName());
+      yGrp.addAllCounters(new HashMap<String, Counter>());
       for (org.apache.hadoop.mapreduce.Counter cntr : grp) {
-        Counter yCntr = new Counter();
-        yCntr.name = cntr.getName();
-        yCntr.displayName = cntr.getDisplayName();
-        yCntr.value = cntr.getValue();
-        yGrp.counters.put(yCntr.name, yCntr);
+        Counter yCntr = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(Counter.class);
+        yCntr.setName(cntr.getName());
+        yCntr.setDisplayName(cntr.getDisplayName());
+        yCntr.setValue(cntr.getValue());
+        yGrp.setCounter(yCntr.getName(), yCntr);
       }
-      yCntrs.groups.put(yGrp.name, yGrp);
+      yCntrs.setCounterGroup(yGrp.getName(), yGrp);
     }
     return yCntrs;
   }
@@ -243,10 +249,10 @@ public class TypeConverter {
       JobReport jobreport, String jobFile, String trackingUrl) {
     String user = null,  jobName = null;
     JobPriority jobPriority = JobPriority.NORMAL;
-    return new org.apache.hadoop.mapred.JobStatus(fromYarn(jobreport.id),
-        jobreport.setupProgress, jobreport.mapProgress,
-        jobreport.reduceProgress, jobreport.cleanupProgress,
-        fromYarn(jobreport.state),
+    return new org.apache.hadoop.mapred.JobStatus(fromYarn(jobreport.getJobId()),
+        jobreport.getSetupProgress(), jobreport.getMapProgress(),
+        jobreport.getReduceProgress(), jobreport.getCleanupProgress(),
+        fromYarn(jobreport.getJobState()),
         jobPriority, user, jobName, jobFile, trackingUrl);
   }
   
@@ -287,38 +293,39 @@ public class TypeConverter {
     throw new YarnException("Unrecognized task state: " + state);
   }
   
-  public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.TaskReport report) {
+  public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) {
     String[] diagnostics = null;
-    if (report.diagnostics != null) {
-      diagnostics = new String[report.diagnostics.size()];
+    if (report.getDiagnosticsList() != null) {
+      diagnostics = new String[report.getDiagnosticsCount()];
       int i = 0;
-      for (CharSequence cs : report.diagnostics) {
+      for (String cs : report.getDiagnosticsList()) {
         diagnostics[i++] = cs.toString();
       }
     } else {
       diagnostics = new String[0];
     }
-    TaskReport rep = new TaskReport(fromYarn(report.id), 
-        report.progress, report.state.toString(),
-      diagnostics, fromYarn(report.state), report.startTime, report.finishTime,
-      fromYarn(report.counters));
+    
+    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()), 
+        report.getProgress(), report.getTaskState().toString(),
+      diagnostics, fromYarn(report.getTaskState()), report.getStartTime(), report.getFinishTime(),
+      fromYarn(report.getCounters()));
     List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts 
           = new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
-    for (org.apache.hadoop.mapreduce.v2.api.TaskAttemptID id 
-        : report.runningAttempts) {
+    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id 
+        : report.getRunningAttemptsList()) {
       runningAtts.add(fromYarn(id));
     }
     rep.setRunningTaskAttemptIds(runningAtts);
-    if (report.successfulAttempt != null) {
-      rep.setSuccessfulAttemptId(fromYarn(report.successfulAttempt));
+    if (report.getSuccessfulAttempt() != null) {
+      rep.setSuccessfulAttemptId(fromYarn(report.getSuccessfulAttempt()));
     }
     return rep;
   }
   
   public static List<TaskReport> fromYarn(
-      List<org.apache.hadoop.mapreduce.v2.api.TaskReport> taskReports) {
+      List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports) {
     List<TaskReport> reports = new ArrayList<TaskReport>();
-    for (org.apache.hadoop.mapreduce.v2.api.TaskReport r : taskReports) {
+    for (org.apache.hadoop.mapreduce.v2.api.records.TaskReport r : taskReports) {
       reports.add(fromYarn(r));
     }
     return reports;

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/MRClientProtocol.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,39 @@
+package org.apache.hadoop.mapreduce.v2.api;
+
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskResponse;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+
+public interface MRClientProtocol {
+  public GetJobReportResponse getJobReport(GetJobReportRequest request) throws YarnRemoteException;
+  public GetTaskReportResponse getTaskReport(GetTaskReportRequest request) throws YarnRemoteException;
+  public GetTaskAttemptReportResponse getTaskAttemptReport(GetTaskAttemptReportRequest request) throws YarnRemoteException;
+  public GetCountersResponse getCounters(GetCountersRequest request) throws YarnRemoteException;
+  public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(GetTaskAttemptCompletionEventsRequest request) throws YarnRemoteException;
+  public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request) throws YarnRemoteException;
+  public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request) throws YarnRemoteException;
+  public KillJobResponse killJob(KillJobRequest request) throws YarnRemoteException;
+  public KillTaskResponse killTask(KillTaskRequest request) throws YarnRemoteException;
+  public KillTaskAttemptResponse killTaskAttempt(KillTaskAttemptRequest request) throws YarnRemoteException;
+  public FailTaskAttemptResponse failTaskAttempt(FailTaskAttemptRequest request) throws YarnRemoteException;
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/client/MRClientProtocolPBClientImpl.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,268 @@
+package org.apache.hadoop.mapreduce.v2.api.impl.pb.client;
+
+import java.io.IOException;
+import java.lang.reflect.UndeclaredThrowableException;
+import java.net.InetSocketAddress;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptCompletionEventsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptCompletionEventsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillJobRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillJobResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.ipc.ProtoOverHadoopRpcEngine;
+import org.apache.hadoop.yarn.proto.MRClientProtocol.MRClientProtocolService;
+
+import com.google.protobuf.ServiceException;
+
+public class MRClientProtocolPBClientImpl implements MRClientProtocol {
+
+  private MRClientProtocolService.BlockingInterface proxy;
+  
+  public MRClientProtocolPBClientImpl(long clientVersion, InetSocketAddress addr, Configuration conf) throws IOException {
+    RPC.setProtocolEngine(conf, MRClientProtocolService.BlockingInterface.class, ProtoOverHadoopRpcEngine.class);
+    proxy = (MRClientProtocolService.BlockingInterface)RPC.getProxy(
+        MRClientProtocolService.BlockingInterface.class, clientVersion, addr, conf);
+  }
+  
+  @Override
+  public GetJobReportResponse getJobReport(GetJobReportRequest request)
+      throws YarnRemoteException {
+    GetJobReportRequestProto requestProto = ((GetJobReportRequestPBImpl)request).getProto();
+    try {
+      return new GetJobReportResponsePBImpl(proxy.getJobReport(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  @Override
+  public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
+      throws YarnRemoteException {
+    GetTaskReportRequestProto requestProto = ((GetTaskReportRequestPBImpl)request).getProto();
+    try {
+      return new GetTaskReportResponsePBImpl(proxy.getTaskReport(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  @Override
+  public GetTaskAttemptReportResponse getTaskAttemptReport(
+      GetTaskAttemptReportRequest request) throws YarnRemoteException {
+    GetTaskAttemptReportRequestProto requestProto = ((GetTaskAttemptReportRequestPBImpl)request).getProto();
+    try {
+      return new GetTaskAttemptReportResponsePBImpl(proxy.getTaskAttemptReport(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the getCounters RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public GetCountersResponse getCounters(GetCountersRequest request)
+      throws YarnRemoteException {
+    GetCountersRequestProto requestProto = ((GetCountersRequestPBImpl)request).getProto();
+    try {
+      return new GetCountersResponsePBImpl(proxy.getCounters(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the getTaskAttemptCompletionEvents RPC;
+   *  unwraps ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
+      GetTaskAttemptCompletionEventsRequest request) throws YarnRemoteException {
+    GetTaskAttemptCompletionEventsRequestProto requestProto = ((GetTaskAttemptCompletionEventsRequestPBImpl)request).getProto();
+    try {
+      return new GetTaskAttemptCompletionEventsResponsePBImpl(proxy.getTaskAttemptCompletionEvents(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the getTaskReports RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
+      throws YarnRemoteException {
+    GetTaskReportsRequestProto requestProto = ((GetTaskReportsRequestPBImpl)request).getProto();
+    try {
+      return new GetTaskReportsResponsePBImpl(proxy.getTaskReports(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the getDiagnostics RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
+      throws YarnRemoteException {
+    GetDiagnosticsRequestProto requestProto = ((GetDiagnosticsRequestPBImpl)request).getProto();
+    try {
+      return new GetDiagnosticsResponsePBImpl(proxy.getDiagnostics(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the killJob RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public KillJobResponse killJob(KillJobRequest request)
+      throws YarnRemoteException {
+    KillJobRequestProto requestProto = ((KillJobRequestPBImpl)request).getProto();
+    try {
+      return new KillJobResponsePBImpl(proxy.killJob(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the killTask RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public KillTaskResponse killTask(KillTaskRequest request)
+      throws YarnRemoteException {
+    KillTaskRequestProto requestProto = ((KillTaskRequestPBImpl)request).getProto();
+    try {
+      return new KillTaskResponsePBImpl(proxy.killTask(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the killTaskAttempt RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public KillTaskAttemptResponse killTaskAttempt(KillTaskAttemptRequest request)
+      throws YarnRemoteException {
+    KillTaskAttemptRequestProto requestProto = ((KillTaskAttemptRequestPBImpl)request).getProto();
+    try {
+      return new KillTaskAttemptResponsePBImpl(proxy.killTaskAttempt(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+
+  /** Client-side PB wrapper for the failTaskAttempt RPC; unwraps
+   *  ServiceException to its YarnRemoteException cause where possible. */
+  @Override
+  public FailTaskAttemptResponse failTaskAttempt(FailTaskAttemptRequest request)
+      throws YarnRemoteException {
+    FailTaskAttemptRequestProto requestProto = ((FailTaskAttemptRequestPBImpl)request).getProto();
+    try {
+      return new FailTaskAttemptResponsePBImpl(proxy.failTaskAttempt(null, requestProto));
+    } catch (ServiceException e) {
+      if (e.getCause() instanceof YarnRemoteException) {
+        throw (YarnRemoteException)e.getCause();
+      } else if (e.getCause() instanceof UndeclaredThrowableException) {
+        throw (UndeclaredThrowableException)e.getCause();
+      } else {
+        throw new UndeclaredThrowableException(e);
+      }
+    }
+  }
+  
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/impl/pb/service/MRClientProtocolPBServiceImpl.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,218 @@
+package org.apache.hadoop.mapreduce.v2.api.impl.pb.service;
+
+import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetDiagnosticsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptCompletionEventsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskAttemptReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetTaskReportsResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillJobResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskAttemptResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskRequest;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.KillTaskResponse;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.FailTaskAttemptResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetDiagnosticsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetJobReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptCompletionEventsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptCompletionEventsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskAttemptReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportsRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetTaskReportsResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillJobRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillJobResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskAttemptResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskRequestPBImpl;
+import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.KillTaskResponsePBImpl;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.FailTaskAttemptResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetCountersResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetDiagnosticsResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetJobReportResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptCompletionEventsResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskAttemptReportResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.GetTaskReportsResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillJobResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskAttemptResponseProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskRequestProto;
+import org.apache.hadoop.mapreduce.v2.proto.MRServiceProtos.KillTaskResponseProto;
+import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
+import org.apache.hadoop.yarn.proto.MRClientProtocol.MRClientProtocolService.BlockingInterface;
+
+import com.google.protobuf.RpcController;
+import com.google.protobuf.ServiceException;
+
+/**
+ * Server-side adapter exposing an {@link MRClientProtocol} implementation
+ * through the protobuf-generated {@code BlockingInterface}. Every RPC method
+ * follows the same pattern: wrap the incoming proto in its record
+ * implementation, delegate to the real protocol object, and return the
+ * response's proto form; a YarnRemoteException from the delegate is wrapped
+ * in a ServiceException, as the protobuf service contract requires.
+ */
+public class MRClientProtocolPBServiceImpl implements BlockingInterface {
+
+  // Delegate that performs the actual work; this class only translates types.
+  private MRClientProtocol real;
+  
+  public MRClientProtocolPBServiceImpl(MRClientProtocol impl) {
+    this.real = impl;
+  }
+  
+  @Override
+  public GetJobReportResponseProto getJobReport(RpcController controller,
+      GetJobReportRequestProto proto) throws ServiceException {
+    GetJobReportRequestPBImpl request = new GetJobReportRequestPBImpl(proto);
+    try {
+      GetJobReportResponse response = real.getJobReport(request);
+      return ((GetJobReportResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetTaskReportResponseProto getTaskReport(RpcController controller,
+      GetTaskReportRequestProto proto) throws ServiceException {
+    GetTaskReportRequest request = new GetTaskReportRequestPBImpl(proto);
+    try {
+      GetTaskReportResponse response = real.getTaskReport(request);
+      return ((GetTaskReportResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetTaskAttemptReportResponseProto getTaskAttemptReport(
+      RpcController controller, GetTaskAttemptReportRequestProto proto)
+      throws ServiceException {
+    GetTaskAttemptReportRequest request = new GetTaskAttemptReportRequestPBImpl(proto);
+    try {
+      GetTaskAttemptReportResponse response = real.getTaskAttemptReport(request);
+      return ((GetTaskAttemptReportResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetCountersResponseProto getCounters(RpcController controller,
+      GetCountersRequestProto proto) throws ServiceException {
+    GetCountersRequest request = new GetCountersRequestPBImpl(proto);
+    try {
+      GetCountersResponse response = real.getCounters(request);
+      return ((GetCountersResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetTaskAttemptCompletionEventsResponseProto getTaskAttemptCompletionEvents(
+      RpcController controller,
+      GetTaskAttemptCompletionEventsRequestProto proto)
+      throws ServiceException {
+    GetTaskAttemptCompletionEventsRequest request = new GetTaskAttemptCompletionEventsRequestPBImpl(proto);
+    try {
+      GetTaskAttemptCompletionEventsResponse response = real.getTaskAttemptCompletionEvents(request);
+      return ((GetTaskAttemptCompletionEventsResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetTaskReportsResponseProto getTaskReports(RpcController controller,
+      GetTaskReportsRequestProto proto) throws ServiceException {
+    GetTaskReportsRequest request = new GetTaskReportsRequestPBImpl(proto);
+    try {
+      GetTaskReportsResponse response = real.getTaskReports(request);
+      return ((GetTaskReportsResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public GetDiagnosticsResponseProto getDiagnostics(RpcController controller,
+      GetDiagnosticsRequestProto proto) throws ServiceException {
+    GetDiagnosticsRequest request = new GetDiagnosticsRequestPBImpl(proto);
+    try {
+      GetDiagnosticsResponse response = real.getDiagnostics(request);
+      return ((GetDiagnosticsResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public KillJobResponseProto killJob(RpcController controller,
+      KillJobRequestProto proto) throws ServiceException {
+    KillJobRequest request = new KillJobRequestPBImpl(proto);
+    try {
+      KillJobResponse response = real.killJob(request);
+      return ((KillJobResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public KillTaskResponseProto killTask(RpcController controller,
+      KillTaskRequestProto proto) throws ServiceException {
+    KillTaskRequest request = new KillTaskRequestPBImpl(proto);
+    try {
+      KillTaskResponse response = real.killTask(request);
+      return ((KillTaskResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public KillTaskAttemptResponseProto killTaskAttempt(RpcController controller,
+      KillTaskAttemptRequestProto proto) throws ServiceException {
+    KillTaskAttemptRequest request = new KillTaskAttemptRequestPBImpl(proto);
+    try {
+      KillTaskAttemptResponse response = real.killTaskAttempt(request);
+      return ((KillTaskAttemptResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+
+  @Override
+  public FailTaskAttemptResponseProto failTaskAttempt(RpcController controller,
+      FailTaskAttemptRequestProto proto) throws ServiceException {
+    FailTaskAttemptRequest request = new FailTaskAttemptRequestPBImpl(proto);
+    try {
+      FailTaskAttemptResponse response = real.failTaskAttempt(request);
+      return ((FailTaskAttemptResponsePBImpl)response).getProto();
+    } catch (YarnRemoteException e) {
+      throw new ServiceException(e);
+    }
+  }
+  
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptRequest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptRequest.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptRequest.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptRequest.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,9 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+
+/**
+ * Request record for the failTaskAttempt RPC: identifies the task attempt
+ * to be marked as failed.
+ */
+public interface FailTaskAttemptRequest {
+  /** @return the id of the task attempt to fail. */
+  public abstract TaskAttemptId getTaskAttemptId();
+  
+  public abstract void setTaskAttemptId(TaskAttemptId taskAttemptId);
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptResponse.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptResponse.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptResponse.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/FailTaskAttemptResponse.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,5 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+/** Response record for the failTaskAttempt RPC; carries no payload. */
+public interface FailTaskAttemptResponse {
+
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersRequest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersRequest.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersRequest.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersRequest.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,10 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+
+/**
+ * Request record for the getCounters RPC: identifies the job whose counters
+ * are requested.
+ */
+public interface GetCountersRequest {
+  /** @return the id of the job whose counters are requested. */
+  public abstract JobId getJobId();
+  
+  public abstract void setJobId(JobId jobId);
+
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersResponse.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersResponse.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersResponse.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetCountersResponse.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,9 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+
+/** Response record for the getCounters RPC: carries the job's counters. */
+public interface GetCountersResponse {
+  /** @return the counters for the requested job. */
+  public abstract Counters getCounters();
+  
+  public abstract void setCounters(Counters counters);
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsRequest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsRequest.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsRequest.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsRequest.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,9 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+
+/**
+ * Request record for the getDiagnostics RPC: identifies the task attempt
+ * whose diagnostics are requested.
+ */
+public interface GetDiagnosticsRequest {
+  /** @return the id of the task attempt whose diagnostics are requested. */
+  public abstract TaskAttemptId getTaskAttemptId();
+  
+  public abstract void setTaskAttemptId(TaskAttemptId taskAttemptId);
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsResponse.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsResponse.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsResponse.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetDiagnosticsResponse.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,15 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import java.util.List;
+
+/**
+ * Response record for the getDiagnostics RPC: a mutable list of diagnostic
+ * strings for a task attempt, with indexed read access and list-style
+ * mutators (add, add-all, remove by index, clear).
+ */
+public interface GetDiagnosticsResponse {
+  /** @return all diagnostic strings. */
+  public abstract List<String> getDiagnosticsList();
+  /** @return the diagnostic string at {@code index}. */
+  public abstract String getDiagnostics(int index);
+  /** @return the number of diagnostic strings. */
+  public abstract int getDiagnosticsCount();
+  
+  public abstract void addAllDiagnostics(List<String> diagnostics);
+  public abstract void addDiagnostics(String diagnostic);
+  public abstract void removeDiagnostics(int index);
+  public abstract void clearDiagnostics();
+
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportRequest.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportRequest.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportRequest.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportRequest.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,9 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+
+/**
+ * Request record for the getJobReport RPC: identifies the job whose report
+ * is requested.
+ */
+public interface GetJobReportRequest {
+  /** @return the id of the job whose report is requested. */
+  public abstract JobId getJobId();
+  
+  public abstract void setJobId(JobId jobId);
+}

Added: hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportResponse.java
URL: http://svn.apache.org/viewvc/hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportResponse.java?rev=1087462&view=auto
==============================================================================
--- hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportResponse.java (added)
+++ hadoop/mapreduce/branches/MR-279/mr-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/api/protocolrecords/GetJobReportResponse.java Thu Mar 31 22:23:22 2011
@@ -0,0 +1,9 @@
+package org.apache.hadoop.mapreduce.v2.api.protocolrecords;
+
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+
+/** Response record for the getJobReport RPC: carries the job's report. */
+public interface GetJobReportResponse {
+  /** @return the report for the requested job. */
+  public abstract JobReport getJobReport();
+  
+  public abstract void setJobReport(JobReport jobReport);
+}