Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/09/30 14:54:55 UTC

svn commit: r1177639 [1/3] - in /hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/ hadoop-mapreduce-client/hadoop-mapreduce-client-co...

Author: vinodkv
Date: Fri Sep 30 12:54:53 2011
New Revision: 1177639

URL: http://svn.apache.org/viewvc?rev=1177639&view=rev
Log:
MAPREDUCE-3098. svn merge -c r1177633 --ignore-ancestry ../../trunk/

Added:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/FinalApplicationStatus.java
      - copied unchanged from r1177633, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/FinalApplicationStatus.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/YarnApplicationState.java
      - copied unchanged from r1177633, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/YarnApplicationState.java
Removed:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationState.java
Modified:
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationReport.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationMasterPBImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/impl/pb/ApplicationReportPBImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/util/ProtoUtils.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_protos.proto
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/proto/yarn_service_protos.proto
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/BuilderUtils.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/MockApps.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ApplicationMasterService.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttempt.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/RMAppAttemptImpl.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/event/RMAppAttemptUnregistrationEvent.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockAM.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMLaunchFailure.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestAMRestart.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestASMStateMachine.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestApplicationMasterExpiry.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/TestSchedulerNegotiator.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/MockRMApp.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/attempt/TestRMAppAttemptTransitions.java
    hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/scheduler/TestQueueMetrics.java

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt Fri Sep 30 12:54:53 2011
@@ -295,6 +295,9 @@ Release 0.23.0 - Unreleased
     MAPREDUCE-3001. Added task-specific counters to AppMaster and JobHistory
     web-UIs. (Robert Joseph Evans via vinodkv)
 
+    MAPREDUCE-3098. Fixed RM and MR AM to report YarnApplicationState and
+    application's FinalStatus separately. (Hitesh Shah via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-2026. Make JobTracker.getJobCounters() and

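As the CHANGES.txt entry above describes, this patch splits the old single ApplicationState record into two: YarnApplicationState (where the application sits in the ResourceManager's lifecycle) and FinalApplicationStatus (the outcome the ApplicationMaster reports when it unregisters). A minimal client-side sketch of reading both fields, assuming only the accessors visible in the hunks below; the helper class is hypothetical and not part of this commit:

    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;

    // Hypothetical helper, not part of this commit: reads the two fields
    // that MAPREDUCE-3098 separates on ApplicationReport.
    public final class AppOutcome {
      private AppOutcome() {}

      // True only when the RM says the app finished AND the AM reported success.
      public static boolean succeeded(ApplicationReport report) {
        // Lifecycle: where the application is in the RM's state machine.
        YarnApplicationState state = report.getYarnApplicationState();
        if (state != YarnApplicationState.FINISHED) {
          return false; // NEW/SUBMITTED/RUNNING, or terminated as FAILED/KILLED
        }
        // Outcome: what the ApplicationMaster set at unregistration.
        FinalApplicationStatus status = report.getFinalApplicationStatus();
        return status == FinalApplicationStatus.SUCCEEDED;
      }
    }
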
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java Fri Sep 30 12:54:53 2011
@@ -46,6 +46,7 @@ import org.apache.hadoop.yarn.api.protoc
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.protocolrecords.RegisterApplicationMasterResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -75,7 +76,7 @@ public abstract class RMCommunicator ext
 
   private final RecordFactory recordFactory =
       RecordFactoryProvider.getRecordFactory(null);
-  
+
   private final AppContext context;
   private Job job;
 
@@ -146,7 +147,7 @@ public abstract class RMCommunicator ext
 
   protected void register() {
     //Register
-    String host = 
+    String host =
       clientService.getBindAddress().getAddress().getHostAddress();
     try {
       RegisterApplicationMasterRequest request =
@@ -155,7 +156,7 @@ public abstract class RMCommunicator ext
       request.setHost(host);
       request.setRpcPort(clientService.getBindAddress().getPort());
       request.setTrackingUrl(host + ":" + clientService.getHttpPort());
-      RegisterApplicationMasterResponse response = 
+      RegisterApplicationMasterResponse response =
         scheduler.registerApplicationMaster(request);
       minContainerCapability = response.getMinimumResourceCapability();
       maxContainerCapability = response.getMaximumResourceCapability();
@@ -169,29 +170,29 @@ public abstract class RMCommunicator ext
 
   protected void unregister() {
     try {
-      String finalState = "RUNNING";
+      FinalApplicationStatus finishState = FinalApplicationStatus.UNDEFINED;
       if (job.getState() == JobState.SUCCEEDED) {
-        finalState = "SUCCEEDED";
+        finishState = FinalApplicationStatus.SUCCEEDED;
       } else if (job.getState() == JobState.KILLED) {
-        finalState = "KILLED";
+        finishState = FinalApplicationStatus.KILLED;
       } else if (job.getState() == JobState.FAILED
           || job.getState() == JobState.ERROR) {
-        finalState = "FAILED";
+        finishState = FinalApplicationStatus.FAILED;
       }
       StringBuffer sb = new StringBuffer();
       for (String s : job.getDiagnostics()) {
         sb.append(s).append("\n");
       }
       LOG.info("Setting job diagnostics to " + sb.toString());
-      
-      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(), 
+
+      String historyUrl = JobHistoryUtils.getHistoryUrl(getConfig(),
           context.getApplicationID());
       LOG.info("History url is " + historyUrl);
 
       FinishApplicationMasterRequest request =
           recordFactory.newRecordInstance(FinishApplicationMasterRequest.class);
       request.setAppAttemptId(this.applicationAttemptId);
-      request.setFinalState(finalState.toString());
+      request.setFinishApplicationStatus(finishState);
       request.setDiagnostics(sb.toString());
       request.setTrackingUrl(historyUrl);
       scheduler.finishApplicationMaster(request);
@@ -203,7 +204,7 @@ public abstract class RMCommunicator ext
   protected Resource getMinContainerCapability() {
     return minContainerCapability;
   }
-  
+
   protected Resource getMaxContainerCapability() {
     return maxContainerCapability;
   }

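The unregister() hunk above replaces the free-form "RUNNING"/"SUCCEEDED" strings with the typed enum before calling setFinishApplicationStatus(). A standalone sketch of that JobState-to-FinalApplicationStatus mapping, assuming only the enum constants shown in the diff; the class and method names are hypothetical:

    import org.apache.hadoop.mapreduce.v2.api.records.JobState;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;

    // Hypothetical standalone version of the mapping RMCommunicator.unregister()
    // now performs when building the FinishApplicationMasterRequest.
    final class FinishStates {
      private FinishStates() {}

      static FinalApplicationStatus fromJobState(JobState state) {
        switch (state) {
          case SUCCEEDED:
            return FinalApplicationStatus.SUCCEEDED;
          case KILLED:
            return FinalApplicationStatus.KILLED;
          case FAILED:
          case ERROR:
            return FinalApplicationStatus.FAILED;
          default:
            // Non-terminal states: the patch now reports UNDEFINED
            // instead of the old "RUNNING" string.
            return FinalApplicationStatus.UNDEFINED;
        }
      }
    }
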
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/TypeConverter.java Fri Sep 30 12:54:53 2011
@@ -45,7 +45,7 @@ import org.apache.hadoop.mapreduce.v2.ut
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.NodeReport;
 import org.apache.hadoop.yarn.api.records.QueueACL;
 import org.apache.hadoop.yarn.api.records.QueueState;
@@ -56,7 +56,7 @@ import org.apache.hadoop.yarn.factory.pr
 public class TypeConverter {
 
   private static RecordFactory recordFactory;
-  
+
   static {
     recordFactory = RecordFactoryProvider.getRecordFactory(null);
   }
@@ -75,7 +75,7 @@ public class TypeConverter {
   public static JobId toYarn(org.apache.hadoop.mapreduce.JobID id) {
     JobId jobId = recordFactory.newRecordInstance(JobId.class);
     jobId.setId(id.getId()); //currently there is 1-1 mapping between appid and jobid
-    
+
     ApplicationId appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setId(id.getId());
     appId.setClusterTimestamp(toClusterTimeStamp(id.getJtIdentifier()));
@@ -137,7 +137,7 @@ public class TypeConverter {
     }
     return TaskAttemptState.valueOf(state.toString());
   }
-  
+
   public static Phase toYarn(org.apache.hadoop.mapred.TaskStatus.Phase phase) {
     switch (phase) {
     case STARTING:
@@ -161,7 +161,7 @@ public class TypeConverter {
     TaskCompletionEvent[] oldEvents =
         new TaskCompletionEvent[newEvents.length];
     int i = 0;
-    for (TaskAttemptCompletionEvent newEvent 
+    for (TaskAttemptCompletionEvent newEvent
         : newEvents) {
       oldEvents[i++] = fromYarn(newEvent);
     }
@@ -215,19 +215,19 @@ public class TypeConverter {
     taskAttemptId.setId(id.getId());
     return taskAttemptId;
   }
-  
+
   public static org.apache.hadoop.mapreduce.Counters fromYarn(
       Counters yCntrs) {
     if (yCntrs == null) {
       return null;
     }
-    org.apache.hadoop.mapreduce.Counters counters = 
+    org.apache.hadoop.mapreduce.Counters counters =
       new org.apache.hadoop.mapreduce.Counters();
     for (CounterGroup yGrp : yCntrs.getAllCounterGroups().values()) {
       counters.addGroup(yGrp.getName(), yGrp.getDisplayName());
       for (Counter yCntr : yGrp.getAllCounters().values()) {
-        org.apache.hadoop.mapreduce.Counter c = 
-          counters.findCounter(yGrp.getName(), 
+        org.apache.hadoop.mapreduce.Counter c =
+          counters.findCounter(yGrp.getName(),
               yCntr.getName());
         c.setValue(yCntr.getValue());
       }
@@ -292,16 +292,16 @@ public class TypeConverter {
     jobStatus.setFailureInfo(jobreport.getDiagnostics());
     return jobStatus;
   }
-  
+
   public static org.apache.hadoop.mapreduce.QueueState fromYarn(
       QueueState state) {
-    org.apache.hadoop.mapreduce.QueueState qState = 
+    org.apache.hadoop.mapreduce.QueueState qState =
       org.apache.hadoop.mapreduce.QueueState.getState(
         state.toString().toLowerCase());
     return qState;
   }
 
-  
+
   public static int fromYarn(JobState state) {
     switch (state) {
     case NEW:
@@ -339,7 +339,7 @@ public class TypeConverter {
     }
     throw new YarnException("Unrecognized task state: " + state);
   }
-  
+
   public static TaskReport fromYarn(org.apache.hadoop.mapreduce.v2.api.records.TaskReport report) {
     String[] diagnostics = null;
     if (report.getDiagnosticsList() != null) {
@@ -351,14 +351,14 @@ public class TypeConverter {
     } else {
       diagnostics = new String[0];
     }
-    
-    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()), 
+
+    TaskReport rep = new TaskReport(fromYarn(report.getTaskId()),
         report.getProgress(), report.getTaskState().toString(),
       diagnostics, fromYarn(report.getTaskState()), report.getStartTime(), report.getFinishTime(),
       fromYarn(report.getCounters()));
-    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts 
+    List<org.apache.hadoop.mapreduce.TaskAttemptID> runningAtts
           = new ArrayList<org.apache.hadoop.mapreduce.TaskAttemptID>();
-    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id 
+    for (org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId id
         : report.getRunningAttemptsList()) {
       runningAtts.add(fromYarn(id));
     }
@@ -368,7 +368,7 @@ public class TypeConverter {
     }
     return rep;
   }
-  
+
   public static List<TaskReport> fromYarn(
       List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports) {
     List<TaskReport> reports = new ArrayList<TaskReport>();
@@ -377,14 +377,14 @@ public class TypeConverter {
     }
     return reports;
   }
-  
-  public static JobStatus.State fromYarn(ApplicationState state) {
+
+  public static JobStatus.State fromYarn(YarnApplicationState state) {
     switch (state) {
     case SUBMITTED:
       return State.PREP;
     case RUNNING:
       return State.RUNNING;
-    case SUCCEEDED:
+    case FINISHED:
       return State.SUCCEEDED;
     case FAILED:
       return State.FAILED;
@@ -396,7 +396,7 @@ public class TypeConverter {
 
   private static final String TT_NAME_PREFIX = "tracker_";
   public static TaskTrackerInfo fromYarn(NodeReport node) {
-    TaskTrackerInfo taskTracker = 
+    TaskTrackerInfo taskTracker =
       new TaskTrackerInfo(TT_NAME_PREFIX + node.getNodeId().toString());
     return taskTracker;
   }
@@ -417,7 +417,7 @@ public class TypeConverter {
       new JobStatus(
           TypeConverter.fromYarn(application.getApplicationId()),
           0.0f, 0.0f, 0.0f, 0.0f,
-          TypeConverter.fromYarn(application.getState()),
+          TypeConverter.fromYarn(application.getYarnApplicationState()),
           org.apache.hadoop.mapreduce.JobPriority.NORMAL,
           application.getUser(), application.getName(),
           application.getQueue(), jobFile, trackingUrl
@@ -433,7 +433,7 @@ public class TypeConverter {
     List<JobStatus> jobStatuses = new ArrayList<JobStatus>();
     for (ApplicationReport application : applications) {
       // each applicationReport has its own jobFile
-      org.apache.hadoop.mapreduce.JobID jobId = 
+      org.apache.hadoop.mapreduce.JobID jobId =
           TypeConverter.fromYarn(application.getApplicationId());
       jobStatuses.add(TypeConverter.fromYarn(application,
           MRApps.getJobFile(conf, application.getUser(), jobId)));
@@ -441,14 +441,14 @@ public class TypeConverter {
     return jobStatuses.toArray(new JobStatus[jobStatuses.size()]);
   }
 
-  
-  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo 
+
+  public static QueueInfo fromYarn(org.apache.hadoop.yarn.api.records.QueueInfo
       queueInfo, Configuration conf) {
     return new QueueInfo(queueInfo.getQueueName(),queueInfo.toString(),
         fromYarn(queueInfo.getQueueState()), TypeConverter.fromYarnApps(
         queueInfo.getApplications(), conf));
   }
-  
+
   public static QueueInfo[] fromYarnQueueInfo(
       List<org.apache.hadoop.yarn.api.records.QueueInfo> queues,
       Configuration conf) {
@@ -467,9 +467,9 @@ public class TypeConverter {
       for (QueueACL qAcl : aclInfo.getUserAcls()) {
         operations.add(qAcl.toString());
       }
-      
-      QueueAclsInfo acl = 
-        new QueueAclsInfo(aclInfo.getQueueName(), 
+
+      QueueAclsInfo acl =
+        new QueueAclsInfo(aclInfo.getQueueName(),
             operations.toArray(new String[operations.size()]));
       acls.add(acl);
     }

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java Fri Sep 30 12:54:53 2011
@@ -21,7 +21,7 @@ import junit.framework.Assert;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationReportPBImpl;
@@ -35,11 +35,11 @@ public class TestTypeConverter {
   @Test
   public void testFromYarn() throws Exception {
     int appStartTime = 612354;
-    ApplicationState state = ApplicationState.RUNNING;
+    YarnApplicationState state = YarnApplicationState.RUNNING;
     ApplicationId applicationId = new ApplicationIdPBImpl();
     ApplicationReportPBImpl applicationReport = new ApplicationReportPBImpl();
     applicationReport.setApplicationId(applicationId);
-    applicationReport.setState(state);
+    applicationReport.setYarnApplicationState(state);
     applicationReport.setStartTime(appStartTime);
     applicationReport.setUser("TestTypeConverter-user");
     JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile");
@@ -56,7 +56,7 @@ public class TestTypeConverter {
     ApplicationReport mockReport = mock(ApplicationReport.class);
     when(mockReport.getTrackingUrl()).thenReturn("dummy-tracking-url");
     when(mockReport.getApplicationId()).thenReturn(mockAppId);
-    when(mockReport.getState()).thenReturn(ApplicationState.KILLED);
+    when(mockReport.getYarnApplicationState()).thenReturn(YarnApplicationState.KILLED);
     when(mockReport.getUser()).thenReturn("dummy-user");
     when(mockReport.getQueue()).thenReturn("dummy-queue");
     String jobFile = "dummy-path/job.xml";

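A possible companion test for the renamed enum mapping in TypeConverter, written in the style of TestTypeConverter above; it only exercises the FINISHED-to-SUCCEEDED, FAILED-to-FAILED and SUBMITTED-to-PREP cases visible in the TypeConverter hunk, and the test class name is hypothetical, not part of this commit:

    import junit.framework.Assert;

    import org.apache.hadoop.mapreduce.JobStatus.State;
    import org.apache.hadoop.mapreduce.TypeConverter;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;
    import org.junit.Test;

    // Hypothetical companion test, not part of this commit.
    public class TestYarnApplicationStateMapping {
      @Test
      public void testStateMapping() {
        // FINISHED replaces the old ApplicationState.SUCCEEDED on the YARN side;
        // the MR-facing result is still JobStatus.State.SUCCEEDED.
        Assert.assertEquals(State.SUCCEEDED,
            TypeConverter.fromYarn(YarnApplicationState.FINISHED));
        Assert.assertEquals(State.FAILED,
            TypeConverter.fromYarn(YarnApplicationState.FAILED));
        Assert.assertEquals(State.PREP,
            TypeConverter.fromYarn(YarnApplicationState.SUBMITTED));
      }
    }
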
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Fri Sep 30 12:54:53 2011
@@ -61,7 +61,7 @@ import org.apache.hadoop.security.token.
 import org.apache.hadoop.yarn.YarnException;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -89,7 +89,7 @@ public class ClientServiceDelegate {
   private static String UNKNOWN_USER = "Unknown User";
   private String trackingUrl;
 
-  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm, 
+  public ClientServiceDelegate(Configuration conf, ResourceMgrDelegate rm,
       JobID jobId, MRClientProtocol historyServerProxy) {
     this.conf = new Configuration(conf); // Cloning for modifying.
     // For faster redirects from AM to HS.
@@ -103,7 +103,7 @@ public class ClientServiceDelegate {
 
   // Get the instance of the NotRunningJob corresponding to the specified
   // user and state
-  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport, 
+  private NotRunningJob getNotRunningJob(ApplicationReport applicationReport,
       JobState state) {
     synchronized (notRunningJobs) {
       HashMap<String, NotRunningJob> map = notRunningJobs.get(state);
@@ -111,8 +111,8 @@ public class ClientServiceDelegate {
         map = new HashMap<String, NotRunningJob>();
         notRunningJobs.put(state, map);
       }
-      String user = 
-          (applicationReport == null) ? 
+      String user =
+          (applicationReport == null) ?
               UNKNOWN_USER : applicationReport.getUser();
       NotRunningJob notRunningJob = map.get(user);
       if (notRunningJob == null) {
@@ -135,7 +135,7 @@ public class ClientServiceDelegate {
       trackingUrl = application.getTrackingUrl();
     }
     String serviceAddr = null;
-    while (application == null || ApplicationState.RUNNING.equals(application.getState())) {
+    while (application == null || YarnApplicationState.RUNNING.equals(application.getYarnApplicationState())) {
       if (application == null) {
         LOG.info("Could not get Job info from RM for job " + jobId
             + ". Redirecting to job history server.");
@@ -145,8 +145,8 @@ public class ClientServiceDelegate {
         if (application.getHost() == null || "".equals(application.getHost())) {
           LOG.debug("AM not assigned to Job. Waiting to get the AM ...");
           Thread.sleep(2000);
-   
-          LOG.debug("Application state is " + application.getState());
+
+          LOG.debug("Application state is " + application.getYarnApplicationState());
           application = rm.getApplicationReport(appId);
           continue;
         }
@@ -168,7 +168,7 @@ public class ClientServiceDelegate {
         //possibly the AM has crashed
         //there may be some time before AM is restarted
         //keep retrying by getting the address from RM
-        LOG.info("Could not connect to " + serviceAddr + 
+        LOG.info("Could not connect to " + serviceAddr +
         ". Waiting for getting the latest AM address...");
         try {
           Thread.sleep(2000);
@@ -189,35 +189,36 @@ public class ClientServiceDelegate {
     }
 
     /** we just want to return if its allocating, so that we don't
-     * block on it. This is to be able to return job status 
+     * block on it. This is to be able to return job status
      * on an allocating Application.
      */
-    
+
     String user = application.getUser();
     if (user == null) {
       throw RPCUtil.getRemoteException("User is not set in the application report");
     }
-    if (application.getState() == ApplicationState.NEW ||
-        application.getState() == ApplicationState.SUBMITTED) {
+    if (application.getYarnApplicationState() == YarnApplicationState.NEW ||
+        application.getYarnApplicationState() == YarnApplicationState.SUBMITTED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.NEW);
     }
-    
-    if (application.getState() == ApplicationState.FAILED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.FAILED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.FAILED);
     }
-    
-    if (application.getState() == ApplicationState.KILLED) {
+
+    if (application.getYarnApplicationState() == YarnApplicationState.KILLED) {
       realProxy = null;
       return getNotRunningJob(application, JobState.KILLED);
     }
-    
-    //History server can serve a job only if application 
+
+    //History server can serve a job only if application
     //succeeded.
-    if (application.getState() == ApplicationState.SUCCEEDED) {
-      LOG.info("Application state is completed. " +
-          "Redirecting to job history server");
+    if (application.getYarnApplicationState() == YarnApplicationState.FINISHED) {
+      LOG.info("Application state is completed. FinalApplicationStatus="
+          + application.getFinalApplicationStatus().toString()
+          + ". Redirecting to job history server");
       realProxy = checkAndGetHSProxy(application, JobState.SUCCEEDED);
     }
     return realProxy;
@@ -241,7 +242,7 @@ public class ClientServiceDelegate {
         Configuration myConf = new Configuration(conf);
         myConf.setClass(
             YarnConfiguration.YARN_SECURITY_INFO,
-            SchedulerSecurityInfo.class, SecurityInfo.class); 
+            SchedulerSecurityInfo.class, SecurityInfo.class);
         YarnRPC rpc = YarnRPC.create(myConf);
         return (MRClientProtocol) rpc.getProxy(MRClientProtocol.class,
             NetUtils.createSocketAddr(serviceAddr), myConf);
@@ -250,7 +251,7 @@ public class ClientServiceDelegate {
     LOG.trace("Connected to ApplicationMaster at: " + serviceAddr);
   }
 
-  private synchronized Object invoke(String method, Class argClass, 
+  private synchronized Object invoke(String method, Class argClass,
       Object args) throws YarnRemoteException {
     Method methodOb = null;
     try {
@@ -289,10 +290,10 @@ public class ClientServiceDelegate {
     org.apache.hadoop.mapreduce.v2.api.records.JobId jobID = TypeConverter.toYarn(arg0);
       GetCountersRequest request = recordFactory.newRecordInstance(GetCountersRequest.class);
       request.setJobId(jobID);
-      Counters cnt = ((GetCountersResponse) 
+      Counters cnt = ((GetCountersResponse)
           invoke("getCounters", GetCountersRequest.class, request)).getCounters();
       return TypeConverter.fromYarn(cnt);
-      
+
   }
 
   public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1, int arg2)
@@ -304,7 +305,7 @@ public class ClientServiceDelegate {
     request.setJobId(jobID);
     request.setFromEventId(arg1);
     request.setMaxEvents(arg2);
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list = 
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptCompletionEvent> list =
       ((GetTaskAttemptCompletionEventsResponse) invoke(
         "getTaskAttemptCompletionEvents", GetTaskAttemptCompletionEventsRequest.class, request)).
         getCompletionEventList();
@@ -332,12 +333,12 @@ public class ClientServiceDelegate {
   }
   
   public JobStatus getJobStatus(JobID oldJobID) throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
       TypeConverter.toYarn(oldJobID);
-    GetJobReportRequest request = 
+    GetJobReportRequest request =
         recordFactory.newRecordInstance(GetJobReportRequest.class);
     request.setJobId(jobId);
-    JobReport report = ((GetJobReportResponse) invoke("getJobReport", 
+    JobReport report = ((GetJobReportResponse) invoke("getJobReport",
         GetJobReportRequest.class, request)).getJobReport();
     if (StringUtils.isEmpty(report.getJobFile())) {
       String jobFile = MRApps.getJobFile(conf, report.getUser(), oldJobID);
@@ -351,24 +352,24 @@ public class ClientServiceDelegate {
 
   public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(JobID oldJobID, TaskType taskType)
        throws YarnRemoteException, YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId = 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
       TypeConverter.toYarn(oldJobID);
-    GetTaskReportsRequest request = 
+    GetTaskReportsRequest request =
         recordFactory.newRecordInstance(GetTaskReportsRequest.class);
     request.setJobId(jobId);
     request.setTaskType(TypeConverter.toYarn(taskType));
-    
-    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports = 
-      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class, 
+
+    List<org.apache.hadoop.mapreduce.v2.api.records.TaskReport> taskReports =
+      ((GetTaskReportsResponse) invoke("getTaskReports", GetTaskReportsRequest.class,
           request)).getTaskReportList();
-    
+
     return TypeConverter.fromYarn
     (taskReports).toArray(new org.apache.hadoop.mapreduce.TaskReport[0]);
   }
 
   public boolean killTask(TaskAttemptID taskAttemptID, boolean fail)
        throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID 
+    org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID
       = TypeConverter.toYarn(taskAttemptID);
     if (fail) {
       FailTaskAttemptRequest failRequest = recordFactory.newRecordInstance(FailTaskAttemptRequest.class);
@@ -381,10 +382,10 @@ public class ClientServiceDelegate {
     }
     return true;
   }
-  
+
   public boolean killJob(JobID oldJobID)
        throws YarnRemoteException {
-    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId 
+    org.apache.hadoop.mapreduce.v2.api.records.JobId jobId
     = TypeConverter.toYarn(oldJobID);
     KillJobRequest killRequest = recordFactory.newRecordInstance(KillJobRequest.class);
     killRequest.setJobId(jobId);
@@ -392,5 +393,5 @@ public class ClientServiceDelegate {
     return true;
   }
 
-    
+
 }

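The getProxy() hunks above route clients by the renamed lifecycle enum: NEW/SUBMITTED, FAILED and KILLED applications are answered by a NotRunningJob stub, and only a FINISHED application is redirected to the job history server. A condensed sketch of that decision, assuming only the states shown in the diff; the class, enum and method names are hypothetical:

    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;

    // Hypothetical illustration, not part of this commit, of the routing
    // decision ClientServiceDelegate.getProxy() makes on YarnApplicationState.
    final class ProxyRouting {
      private ProxyRouting() {}

      enum Target { NOT_RUNNING, HISTORY_SERVER, APPLICATION_MASTER }

      static Target route(ApplicationReport report) {
        switch (report.getYarnApplicationState()) {
          case NEW:
          case SUBMITTED:
            return Target.NOT_RUNNING;       // surfaced as JobState.NEW
          case FAILED:
          case KILLED:
            return Target.NOT_RUNNING;       // surfaced as JobState.FAILED / KILLED
          case FINISHED:
            return Target.HISTORY_SERVER;    // completed apps are served by the JHS
          default:
            return Target.APPLICATION_MASTER; // RUNNING: talk to the live AM
        }
      }
    }
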
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/NotRunningJob.java Fri Sep 30 12:54:53 2011
@@ -22,8 +22,6 @@ import java.util.ArrayList;
 import java.util.HashMap;
 
 import org.apache.commons.lang.NotImplementedException;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptResponse;
@@ -55,40 +53,36 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.BuilderUtils;
 
 public class NotRunningJob implements MRClientProtocol {
 
-  private static final Log LOG = LogFactory.getLog(NotRunningJob.class);
-  
-  private RecordFactory recordFactory = 
+  private RecordFactory recordFactory =
     RecordFactoryProvider.getRecordFactory(null);
-  
+
   private final JobState jobState;
   private final ApplicationReport applicationReport;
-  
-  
+
+
   private ApplicationReport getUnknownApplicationReport() {
-    ApplicationReport unknown = 
-        recordFactory.newRecordInstance(ApplicationReport.class);
-    unknown.setUser("N/A");
-    unknown.setHost("N/A");
-    unknown.setName("N/A");
-    unknown.setQueue("N/A");
-    unknown.setStartTime(0);
-    unknown.setFinishTime(0);
-    unknown.setTrackingUrl("N/A");
-    unknown.setDiagnostics("N/A");
-    LOG.info("getUnknownApplicationReport");
-    return unknown;
+    ApplicationId unknownAppId = recordFactory.newRecordInstance(ApplicationId.class);
+
+    // Setting AppState to NEW and finalStatus to UNDEFINED as they are never used 
+    // for a non running job
+    return BuilderUtils.newApplicationReport(unknownAppId, "N/A", "N/A", "N/A", "N/A", 0, "", 
+        YarnApplicationState.NEW, "N/A", "N/A", 0, 0, FinalApplicationStatus.UNDEFINED);    
   }
-  
+
   NotRunningJob(ApplicationReport applicationReport, JobState jobState) {
-    this.applicationReport = 
-        (applicationReport ==  null) ? 
+    this.applicationReport =
+        (applicationReport ==  null) ?
             getUnknownApplicationReport() : applicationReport;
     this.jobState = jobState;
   }
@@ -96,7 +90,7 @@ public class NotRunningJob implements MR
   @Override
   public FailTaskAttemptResponse failTaskAttempt(
       FailTaskAttemptRequest request) throws YarnRemoteException {
-    FailTaskAttemptResponse resp = 
+    FailTaskAttemptResponse resp =
       recordFactory.newRecordInstance(FailTaskAttemptResponse.class);
     return resp;
   }
@@ -104,7 +98,7 @@ public class NotRunningJob implements MR
   @Override
   public GetCountersResponse getCounters(GetCountersRequest request)
       throws YarnRemoteException {
-    GetCountersResponse resp = 
+    GetCountersResponse resp =
       recordFactory.newRecordInstance(GetCountersResponse.class);
     Counters counters = recordFactory.newRecordInstance(Counters.class);
     counters.addAllCounterGroups(new HashMap<String, CounterGroup>());
@@ -115,7 +109,7 @@ public class NotRunningJob implements MR
   @Override
   public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request)
       throws YarnRemoteException {
-    GetDiagnosticsResponse resp = 
+    GetDiagnosticsResponse resp =
       recordFactory.newRecordInstance(GetDiagnosticsResponse.class);
     resp.addDiagnostics("");
     return resp;
@@ -135,7 +129,7 @@ public class NotRunningJob implements MR
     jobReport.setTrackingUrl(applicationReport.getTrackingUrl());
     jobReport.setFinishTime(applicationReport.getFinishTime());
 
-    GetJobReportResponse resp = 
+    GetJobReportResponse resp =
         recordFactory.newRecordInstance(GetJobReportResponse.class);
     resp.setJobReport(jobReport);
     return resp;
@@ -145,7 +139,7 @@ public class NotRunningJob implements MR
   public GetTaskAttemptCompletionEventsResponse getTaskAttemptCompletionEvents(
       GetTaskAttemptCompletionEventsRequest request)
       throws YarnRemoteException {
-    GetTaskAttemptCompletionEventsResponse resp = 
+    GetTaskAttemptCompletionEventsResponse resp =
       recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsResponse.class);
     resp.addAllCompletionEvents(new ArrayList<TaskAttemptCompletionEvent>());
     return resp;
@@ -161,7 +155,7 @@ public class NotRunningJob implements MR
   @Override
   public GetTaskReportResponse getTaskReport(GetTaskReportRequest request)
       throws YarnRemoteException {
-    GetTaskReportResponse resp = 
+    GetTaskReportResponse resp =
       recordFactory.newRecordInstance(GetTaskReportResponse.class);
     TaskReport report = recordFactory.newRecordInstance(TaskReport.class);
     report.setTaskId(request.getTaskId());
@@ -176,7 +170,7 @@ public class NotRunningJob implements MR
   @Override
   public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request)
       throws YarnRemoteException {
-    GetTaskReportsResponse resp = 
+    GetTaskReportsResponse resp =
       recordFactory.newRecordInstance(GetTaskReportsResponse.class);
     resp.addAllTaskReports(new ArrayList<TaskReport>());
     return resp;
@@ -185,7 +179,7 @@ public class NotRunningJob implements MR
   @Override
   public KillJobResponse killJob(KillJobRequest request)
       throws YarnRemoteException {
-    KillJobResponse resp = 
+    KillJobResponse resp =
       recordFactory.newRecordInstance(KillJobResponse.class);
     return resp;
   }
@@ -193,7 +187,7 @@ public class NotRunningJob implements MR
   @Override
   public KillTaskResponse killTask(KillTaskRequest request)
       throws YarnRemoteException {
-    KillTaskResponse resp = 
+    KillTaskResponse resp =
       recordFactory.newRecordInstance(KillTaskResponse.class);
     return resp;
   }
@@ -201,9 +195,9 @@ public class NotRunningJob implements MR
   @Override
   public KillTaskAttemptResponse killTaskAttempt(
       KillTaskAttemptRequest request) throws YarnRemoteException {
-    KillTaskAttemptResponse resp = 
+    KillTaskAttemptResponse resp =
       recordFactory.newRecordInstance(KillTaskAttemptResponse.class);
     return resp;
   }
-  
+
 }

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/YARNRunner.java Fri Sep 30 12:54:53 2011
@@ -62,7 +62,6 @@ import org.apache.hadoop.yarn.api.Applic
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
 import org.apache.hadoop.yarn.api.records.LocalResource;
@@ -70,6 +69,7 @@ import org.apache.hadoop.yarn.api.record
 import org.apache.hadoop.yarn.api.records.LocalResourceVisibility;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.URL;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -99,7 +99,7 @@ public class YARNRunner implements Clien
   }
 
   /**
-   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting 
+   * Similar to {@link #YARNRunner(Configuration)} but allowing injecting
    * {@link ResourceMgrDelegate}. Enables mocking and testing.
    * @param conf the configuration object for the client
    * @param resMgrDelegate the resourcemanager client handle.
@@ -107,12 +107,12 @@ public class YARNRunner implements Clien
   public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate) {
    this(conf, resMgrDelegate, new ClientCache(conf, resMgrDelegate));
   }
-  
+
   /**
-   * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)} 
+   * Similar to {@link YARNRunner#YARNRunner(Configuration, ResourceMgrDelegate)}
    * but allowing injecting {@link ClientCache}. Enable mocking and testing.
    * @param conf the configuration object
-   * @param resMgrDelegate the resource manager delegate 
+   * @param resMgrDelegate the resource manager delegate
    * @param clientCache the client cache object.
    */
   public YARNRunner(Configuration conf, ResourceMgrDelegate resMgrDelegate,
@@ -126,7 +126,7 @@ public class YARNRunner implements Clien
       throw new RuntimeException("Error in instantiating YarnClient", ufe);
     }
   }
-  
+
   @Override
   public void cancelDelegationToken(Token<DelegationTokenIdentifier> arg0)
       throws IOException, InterruptedException {
@@ -152,7 +152,7 @@ public class YARNRunner implements Clien
 
   @Override
   public ClusterMetrics getClusterMetrics() throws IOException,
-      InterruptedException {  
+      InterruptedException {
     return resMgrDelegate.getClusterMetrics();
   }
 
@@ -209,13 +209,13 @@ public class YARNRunner implements Clien
   public String getSystemDir() throws IOException, InterruptedException {
     return resMgrDelegate.getSystemDir();
   }
-  
+
   @Override
   public long getTaskTrackerExpiryInterval() throws IOException,
       InterruptedException {
     return resMgrDelegate.getTaskTrackerExpiryInterval();
   }
-  
+
   @Override
   public JobStatus submitJob(JobID jobId, String jobSubmitDir, Credentials ts)
   throws IOException, InterruptedException {
@@ -230,20 +230,20 @@ public class YARNRunner implements Clien
     }
 
     // Construct necessary information to start the MR AM
-    ApplicationSubmissionContext appContext = 
+    ApplicationSubmissionContext appContext =
       createApplicationSubmissionContext(conf, jobSubmitDir, ts);
-    
+
     // Submit to ResourceManager
     ApplicationId applicationId = resMgrDelegate.submitApplication(appContext);
-    
+
     ApplicationReport appMaster = resMgrDelegate
         .getApplicationReport(applicationId);
-    String diagnostics = 
-        (appMaster == null ? 
+    String diagnostics =
+        (appMaster == null ?
             "application report is null" : appMaster.getDiagnostics());
-    if (appMaster == null || appMaster.getState() == ApplicationState.FAILED 
-        || appMaster.getState() == ApplicationState.KILLED) {
-      throw new IOException("Failed to run job : " + 
+    if (appMaster == null || appMaster.getYarnApplicationState() == YarnApplicationState.FAILED
+        || appMaster.getYarnApplicationState() == YarnApplicationState.KILLED) {
+      throw new IOException("Failed to run job : " +
         diagnostics);
     }
     return clientCache.getClient(jobId).getJobStatus(jobId);
@@ -266,7 +266,7 @@ public class YARNRunner implements Clien
       Configuration jobConf,
       String jobSubmitDir, Credentials ts) throws IOException {
     ApplicationId applicationId = resMgrDelegate.getApplicationId();
-    
+
     // Setup resource requirements
     Resource capability = recordFactory.newRecordInstance(Resource.class);
     capability.setMemory(conf.getInt(MRJobConfig.MR_AM_VMEM_MB,
@@ -276,9 +276,9 @@ public class YARNRunner implements Clien
     // Setup LocalResources
     Map<String, LocalResource> localResources =
         new HashMap<String, LocalResource>();
-    
+
     Path jobConfPath = new Path(jobSubmitDir, MRJobConfig.JOB_CONF_FILE);
-    
+
     URL yarnUrlForJobSubmitDir = ConverterUtils
         .getYarnUrlFromPath(defaultFileContext.getDefaultFileSystem()
             .resolvePath(
@@ -299,18 +299,18 @@ public class YARNRunner implements Clien
       LOG.info("Job jar is not present. "
           + "Not adding any jar to the list of resources.");
     }
-    
+
     // TODO gross hack
-    for (String s : new String[] { 
-        MRJobConfig.JOB_SPLIT, 
+    for (String s : new String[] {
+        MRJobConfig.JOB_SPLIT,
         MRJobConfig.JOB_SPLIT_METAINFO,
         MRJobConfig.APPLICATION_TOKENS_FILE }) {
       localResources.put(
           MRJobConfig.JOB_SUBMIT_DIR + "/" + s,
-          createApplicationResource(defaultFileContext, 
+          createApplicationResource(defaultFileContext,
               new Path(jobSubmitDir, s)));
     }
-    
+
     // Setup security tokens
     ByteBuffer securityTokens = null;
     if (UserGroupInformation.isSecurityEnabled()) {
@@ -322,20 +322,20 @@ public class YARNRunner implements Clien
     // Setup the command to run the AM
     Vector<CharSequence> vargs = new Vector<CharSequence>(8);
     vargs.add(Environment.JAVA_HOME.$() + "/bin/java");
-    
+
     long logSize = TaskLog.getTaskLogLength(new JobConf(conf));
     vargs.add("-Dlog4j.configuration=container-log4j.properties");
     vargs.add("-D" + MRJobConfig.TASK_LOG_DIR + "="
         + ApplicationConstants.LOG_DIR_EXPANSION_VAR);
     vargs.add("-D" + MRJobConfig.TASK_LOG_SIZE + "=" + logSize);
-    
+
     vargs.add(conf.get(MRJobConfig.MR_AM_COMMAND_OPTS,
         MRJobConfig.DEFAULT_MR_AM_COMMAND_OPTS));
 
     vargs.add(MRJobConfig.APPLICATION_MASTER_CLASS);
-    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + 
+    vargs.add("1>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
         Path.SEPARATOR + ApplicationConstants.STDOUT);
-    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR + 
+    vargs.add("2>" + ApplicationConstants.LOG_DIR_EXPANSION_VAR +
         Path.SEPARATOR + ApplicationConstants.STDERR);
 
 
@@ -349,12 +349,12 @@ public class YARNRunner implements Clien
 
     LOG.info("Command to launch container for ApplicationMaster is : "
         + mergedCommand);
-    
-    // Setup the CLASSPATH in environment 
+
+    // Setup the CLASSPATH in environment
     // i.e. add { job jar, CWD, Hadoop jars} to classpath.
     Map<String, String> environment = new HashMap<String, String>();
     MRApps.setClasspath(environment);
-    
+
     // Parse distributed cache
     MRApps.setupDistributedCache(jobConf, localResources);
 
@@ -374,12 +374,12 @@ public class YARNRunner implements Clien
     appContext.setUser(                                        // User name
         UserGroupInformation.getCurrentUser().getShortUserName());
     appContext.setQueue(                                       // Queue name
-        jobConf.get(JobContext.QUEUE_NAME,     
+        jobConf.get(JobContext.QUEUE_NAME,
         YarnConfiguration.DEFAULT_QUEUE_NAME));
     appContext.setApplicationName(                             // Job name
-        jobConf.get(JobContext.JOB_NAME, 
-        YarnConfiguration.DEFAULT_APPLICATION_NAME));              
-    appContext.setAMContainerSpec(amContainer);         // AM Container 
+        jobConf.get(JobContext.JOB_NAME,
+        YarnConfiguration.DEFAULT_APPLICATION_NAME));
+    appContext.setAMContainerSpec(amContainer);         // AM Container
 
     return appContext;
   }
@@ -394,14 +394,14 @@ public class YARNRunner implements Clien
   public long getProtocolVersion(String arg0, long arg1) throws IOException {
     return resMgrDelegate.getProtocolVersion(arg0, arg1);
   }
-  
+
   @Override
   public long renewDelegationToken(Token<DelegationTokenIdentifier> arg0)
       throws IOException, InterruptedException {
     return resMgrDelegate.renewDelegationToken(arg0);
   }
 
-  
+
   @Override
   public Counters getJobCounters(JobID arg0) throws IOException,
       InterruptedException {
@@ -419,7 +419,7 @@ public class YARNRunner implements Clien
     JobStatus status = clientCache.getClient(jobID).getJobStatus(jobID);
     return status;
   }
-  
+
   @Override
   public TaskCompletionEvent[] getTaskCompletionEvents(JobID arg0, int arg1,
       int arg2) throws IOException, InterruptedException {
@@ -446,8 +446,8 @@ public class YARNRunner implements Clien
     if (status.getState() != JobStatus.State.RUNNING) {
       resMgrDelegate.killApplication(TypeConverter.toYarn(arg0).getAppId());
       return;
-    } 
-    
+    }
+
     try {
       /* send a kill to the AM */
       clientCache.getClient(arg0).killJob(arg0);

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientRedirect.java Fri Sep 30 12:54:53 2011
@@ -88,7 +88,8 @@ import org.apache.hadoop.yarn.api.protoc
 import org.apache.hadoop.yarn.api.protocolrecords.SubmitApplicationResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.factories.RecordFactory;
@@ -107,17 +108,17 @@ public class TestClientRedirect {
   private static final Log LOG = LogFactory.getLog(TestClientRedirect.class);
   private static final String RMADDRESS = "0.0.0.0:8054";
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-  
+
   private static final String AMHOSTADDRESS = "0.0.0.0:10020";
   private static final String HSHOSTADDRESS = "0.0.0.0:10021";
-  private volatile boolean amContact = false; 
+  private volatile boolean amContact = false;
   private volatile boolean hsContact = false;
   private volatile boolean amRunning = false;
   private volatile boolean amRestarting = false;
 
   @Test
   public void testRedirect() throws Exception {
-    
+
     Configuration conf = new YarnConfiguration();
     conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
     conf.set(YarnConfiguration.RM_ADDRESS, RMADDRESS);
@@ -125,7 +126,7 @@ public class TestClientRedirect {
     RMService rmService = new RMService("test");
     rmService.init(conf);
     rmService.start();
-  
+
     AMService amService = new AMService();
     amService.init(conf);
     amService.start(conf);
@@ -134,16 +135,16 @@ public class TestClientRedirect {
     HistoryService historyService = new HistoryService();
     historyService.init(conf);
     historyService.start(conf);
-  
+
     LOG.info("services started");
     Cluster cluster = new Cluster(conf);
     org.apache.hadoop.mapreduce.JobID jobID =
       new org.apache.hadoop.mapred.JobID("201103121733", 1);
-    org.apache.hadoop.mapreduce.Counters counters = 
+    org.apache.hadoop.mapreduce.Counters counters =
         cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
-   
+
     LOG.info("Sleeping for 5 seconds before stop for" +
     " the client socket to not get EOF immediately..");
     Thread.sleep(5000);
@@ -155,17 +156,17 @@ public class TestClientRedirect {
     LOG.info("Sleeping for 5 seconds after stop for" +
     		" the server to exit cleanly..");
     Thread.sleep(5000);
-    
+
     amRestarting = true;
     // Same client
     //results are returned from fake (not started job)
     counters = cluster.getJob(jobID).getCounters();
     Assert.assertEquals(0, counters.countCounters());
     Job job = cluster.getJob(jobID);
-    org.apache.hadoop.mapreduce.TaskID taskId = 
+    org.apache.hadoop.mapreduce.TaskID taskId =
       new org.apache.hadoop.mapreduce.TaskID(jobID, TaskType.MAP, 0);
     TaskAttemptID tId = new TaskAttemptID(taskId, 0);
-    
+
     //invoke all methods to check that no exception is thrown
     job.killJob();
     job.killTask(tId);
@@ -175,25 +176,25 @@ public class TestClientRedirect {
     job.getTaskDiagnostics(tId);
     job.getTaskReports(TaskType.MAP);
     job.getTrackingURL();
-    
+
     amRestarting = false;
     amService = new AMService();
     amService.init(conf);
     amService.start(conf);
     amRunning = true;
     amContact = false; //reset
-    
+
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(amContact);
-    
+
     amRunning = false;
 
     // Same client
     counters = cluster.getJob(jobID).getCounters();
     validateCounters(counters);
     Assert.assertTrue(hsContact);
-    
+
     rmService.stop();
     historyService.stop();
   }
@@ -248,7 +249,7 @@ public class TestClientRedirect {
     public GetNewApplicationResponse getNewApplication(GetNewApplicationRequest request) throws YarnRemoteException {
       return null;
     }
-    
+
     @Override
     public GetApplicationReportResponse getApplicationReport(
         GetApplicationReportRequest request) throws YarnRemoteException {
@@ -256,12 +257,14 @@ public class TestClientRedirect {
       ApplicationReport application = recordFactory
           .newRecordInstance(ApplicationReport.class);
       application.setApplicationId(applicationId);
+      application.setFinalApplicationStatus(FinalApplicationStatus.UNDEFINED);
       if (amRunning) {
-        application.setState(ApplicationState.RUNNING);
+        application.setYarnApplicationState(YarnApplicationState.RUNNING);
       } else if (amRestarting) {
-        application.setState(ApplicationState.SUBMITTED);
+        application.setYarnApplicationState(YarnApplicationState.SUBMITTED);
       } else {
-        application.setState(ApplicationState.SUCCEEDED);
+        application.setYarnApplicationState(YarnApplicationState.FINISHED);
+        application.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
       }
       String[] split = AMHOSTADDRESS.split(":");
       application.setHost(split[0]);
@@ -339,7 +342,7 @@ public class TestClientRedirect {
    }
   }
 
-  class AMService extends AbstractService 
+  class AMService extends AbstractService
       implements MRClientProtocol {
     private InetSocketAddress bindAddress;
     private Server server;
@@ -347,7 +350,7 @@ public class TestClientRedirect {
     public AMService() {
       this(AMHOSTADDRESS);
     }
-    
+
     public AMService(String hostAddress) {
       super("AMService");
       this.hostAddress = hostAddress;
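
The getApplicationReport() hunk in this test is the clearest picture of the API change: the old single ApplicationState field becomes a lifecycle state (YarnApplicationState) plus a terminal outcome (FinalApplicationStatus), and in the test above the outcome stays UNDEFINED until the application finishes. A hypothetical reader-side helper, not part of this patch, that consumes the pair (the matching getters on ApplicationReport are assumed here):

    import org.apache.hadoop.yarn.api.records.ApplicationReport;
    import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
    import org.apache.hadoop.yarn.api.records.YarnApplicationState;

    // Hypothetical helper: YarnApplicationState says where the application is in
    // its lifecycle; FinalApplicationStatus says how it ended and is only
    // meaningful once the lifecycle state reaches FINISHED.
    static String describe(ApplicationReport report) {
      YarnApplicationState state = report.getYarnApplicationState();
      if (state != YarnApplicationState.FINISHED) {
        return "application is " + state;                    // e.g. SUBMITTED, RUNNING
      }
      FinalApplicationStatus outcome = report.getFinalApplicationStatus();
      return "application finished, outcome " + outcome;     // e.g. SUCCEEDED
    }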

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java Fri Sep 30 12:54:53 2011
@@ -32,8 +32,9 @@ import org.apache.hadoop.mapreduce.v2.ap
 import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetJobReportResponse;
 import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.api.records.JobState;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
 import org.apache.hadoop.yarn.ipc.RPCUtil;
@@ -163,7 +164,7 @@ public class TestClientServiceDelegate {
   private ApplicationReport getApplicationReport() {
     ApplicationReport applicationReport = Records
         .newRecord(ApplicationReport.class);
-    applicationReport.setState(ApplicationState.SUCCEEDED);
+    applicationReport.setYarnApplicationState(YarnApplicationState.FINISHED);
     applicationReport.setUser("root");
     applicationReport.setHost("N/A");
     applicationReport.setName("N/A");
@@ -172,6 +173,7 @@ public class TestClientServiceDelegate {
     applicationReport.setFinishTime(0);
     applicationReport.setTrackingUrl("N/A");
     applicationReport.setDiagnostics("N/A");
+    applicationReport.setFinalApplicationStatus(FinalApplicationStatus.SUCCEEDED);
     return applicationReport;
   }
 

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobsWithHistoryService.java Fri Sep 30 12:54:53 2011
@@ -36,7 +36,6 @@ import org.apache.hadoop.mapreduce.Count
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.TypeConverter;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMAppState;
 import org.junit.Before;
 import org.junit.After;

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestYARNRunner.java Fri Sep 30 12:54:53 2011
@@ -64,10 +64,10 @@ import org.apache.hadoop.yarn.api.protoc
 import org.apache.hadoop.yarn.api.protocolrecords.GetQueueUserAclsInfoResponse;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.api.records.ApplicationReport;
-import org.apache.hadoop.yarn.api.records.ApplicationState;
 import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
 import org.apache.hadoop.yarn.api.records.QueueInfo;
 import org.apache.hadoop.yarn.api.records.YarnClusterMetrics;
+import org.apache.hadoop.yarn.api.records.YarnApplicationState;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -77,25 +77,25 @@ import org.mockito.invocation.Invocation
 import org.mockito.stubbing.Answer;
 
 /**
- * Test YarnRunner and make sure the client side plugin works 
+ * Test YarnRunner and make sure the client side plugin works
  * fine
  */
 public class TestYARNRunner extends TestCase {
   private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
   private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
- 
+
   private YARNRunner yarnRunner;
   private ResourceMgrDelegate resourceMgrDelegate;
   private YarnConfiguration conf;
   private ClientCache clientCache;
   private ApplicationId appId;
   private JobID jobId;
-  private File testWorkDir = 
+  private File testWorkDir =
       new File("target", TestYARNRunner.class.getName());
   private ApplicationSubmissionContext submissionContext;
   private  ClientServiceDelegate clientDelegate;
   private static final String failString = "Rejected job";
- 
+
   @Before
   public void setUp() throws Exception {
     resourceMgrDelegate = mock(ResourceMgrDelegate.class);
@@ -115,7 +115,7 @@ public class TestYARNRunner extends Test
         }
         ).when(yarnRunner).createApplicationSubmissionContext(any(Configuration.class),
             any(String.class), any(Credentials.class));
-    
+
     appId = recordFactory.newRecordInstance(ApplicationId.class);
     appId.setClusterTimestamp(System.currentTimeMillis());
     appId.setId(1);
@@ -125,13 +125,13 @@ public class TestYARNRunner extends Test
     }
     testWorkDir.mkdirs();
    }
-  
-  
+
+
   @Test
   public void testJobKill() throws Exception {
     clientDelegate = mock(ClientServiceDelegate.class);
-    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new 
-        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f, 
+    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
             State.PREP, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
     when(clientDelegate.killJob(any(JobID.class))).thenReturn(true);
     doAnswer(
@@ -145,13 +145,13 @@ public class TestYARNRunner extends Test
         ).when(clientCache).getClient(any(JobID.class));
     yarnRunner.killJob(jobId);
     verify(resourceMgrDelegate).killApplication(appId);
-    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new 
-        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f, 
+    when(clientDelegate.getJobStatus(any(JobID.class))).thenReturn(new
+        org.apache.hadoop.mapreduce.JobStatus(jobId, 0f, 0f, 0f, 0f,
             State.RUNNING, JobPriority.HIGH, "tmp", "tmp", "tmp", "tmp"));
     yarnRunner.killJob(jobId);
     verify(clientDelegate).killJob(jobId);
   }
-  
+
   @Test
   public void testJobSubmissionFailure() throws Exception {
     when(resourceMgrDelegate.submitApplication(any(ApplicationSubmissionContext.class))).
@@ -159,7 +159,7 @@ public class TestYARNRunner extends Test
     ApplicationReport report = mock(ApplicationReport.class);
     when(report.getApplicationId()).thenReturn(appId);
     when(report.getDiagnostics()).thenReturn(failString);
-    when(report.getState()).thenReturn(ApplicationState.FAILED);
+    when(report.getYarnApplicationState()).thenReturn(YarnApplicationState.FAILED);
     when(resourceMgrDelegate.getApplicationReport(appId)).thenReturn(report);
     Credentials credentials = new Credentials();
     File jobxml = new File(testWorkDir, "job.xml");
@@ -167,13 +167,13 @@ public class TestYARNRunner extends Test
     conf.writeXml(out);
     out.close();
     try {
-      yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials); 
+      yarnRunner.submitJob(jobId, testWorkDir.getAbsolutePath().toString(), credentials);
     } catch(IOException io) {
       LOG.info("Logging exception:", io);
       assertTrue(io.getLocalizedMessage().contains(failString));
     }
   }
-  
+
   @Test
   public void testResourceMgrDelegate() throws Exception {
     /* we do not want a mock of resourcemgr delegate */
@@ -184,19 +184,19 @@ public class TestYARNRunner extends Test
     .thenReturn(null);
     delegate.killApplication(appId);
     verify(clientRMProtocol).forceKillApplication(any(KillApplicationRequest.class));
-    
+
     /* make sure getalljobs calls get all applications */
     when(clientRMProtocol.getAllApplications(any(GetAllApplicationsRequest.class))).
     thenReturn(recordFactory.newRecordInstance(GetAllApplicationsResponse.class));
     delegate.getAllJobs();
     verify(clientRMProtocol).getAllApplications(any(GetAllApplicationsRequest.class));
-    
+
     /* make sure getapplication report is called */
     when(clientRMProtocol.getApplicationReport(any(GetApplicationReportRequest.class)))
     .thenReturn(recordFactory.newRecordInstance(GetApplicationReportResponse.class));
     delegate.getApplicationReport(appId);
     verify(clientRMProtocol).getApplicationReport(any(GetApplicationReportRequest.class));
-    
+
     /* make sure metrics is called */
     GetClusterMetricsResponse clusterMetricsResponse = recordFactory.newRecordInstance
         (GetClusterMetricsResponse.class);
@@ -206,7 +206,7 @@ public class TestYARNRunner extends Test
     .thenReturn(clusterMetricsResponse);
     delegate.getClusterMetrics();
     verify(clientRMProtocol).getClusterMetrics(any(GetClusterMetricsRequest.class));
-    
+
     when(clientRMProtocol.getClusterNodes(any(GetClusterNodesRequest.class))).
     thenReturn(recordFactory.newRecordInstance(GetClusterNodesResponse.class));
     delegate.getActiveTrackers();
@@ -227,7 +227,7 @@ public class TestYARNRunner extends Test
     thenReturn(queueInfoResponse);
     delegate.getQueues();
     verify(clientRMProtocol).getQueueInfo(any(GetQueueInfoRequest.class));
-    
+
     GetQueueUserAclsInfoResponse aclResponse = recordFactory.newRecordInstance(
         GetQueueUserAclsInfoResponse.class);
     when(clientRMProtocol.getQueueUserAcls(any(GetQueueUserAclsInfoRequest.class)))

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/FinishApplicationMasterRequest.java Fri Sep 30 12:54:53 2011
@@ -22,15 +22,16 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability.Stable;
 import org.apache.hadoop.yarn.api.AMRMProtocol;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 
 /**
- * <p>The finalization request sent by the <code>ApplicationMaster</code> to 
+ * <p>The finalization request sent by the <code>ApplicationMaster</code> to
  * inform the <code>ResourceManager</code> about its completion.</p>
- * 
+ *
  * <p>The final request includes details such:
  *   <ul>
  *     <li>
- *         {@link ApplicationAttemptId} being managed by the 
+ *         {@link ApplicationAttemptId} being managed by the
  *         <code>ApplicationMaster</code>
  *     </li>
  *     <li>Final state of the <code>ApplicationMaster</code></li>
@@ -47,19 +48,19 @@ import org.apache.hadoop.yarn.api.record
 public interface FinishApplicationMasterRequest {
 
   /**
-   * Get the <code>ApplicationAttemptId</code> being managed by the 
+   * Get the <code>ApplicationAttemptId</code> being managed by the
    * <code>ApplicationMaster</code>.
-   * @return <code>ApplicationAttemptId</code> being managed by the 
+   * @return <code>ApplicationAttemptId</code> being managed by the
    *         <code>ApplicationMaster</code>
    */
   @Public
   @Stable
   ApplicationAttemptId getApplicationAttemptId();
-  
+
   /**
-   * Set the <code>ApplicationAttemptId</code> being managed by the 
+   * Set the <code>ApplicationAttemptId</code> being managed by the
    * <code>ApplicationMaster</code>.
-   * @param applicationAttemptId <code>ApplicationAttemptId</code> being managed 
+   * @param applicationAttemptId <code>ApplicationAttemptId</code> being managed
    *                             by the <code>ApplicationMaster</code>
    */
   @Public
@@ -72,15 +73,15 @@ public interface FinishApplicationMaster
    */
   @Public
   @Stable
-  String getFinalState();
-  
+  FinalApplicationStatus getFinalApplicationStatus();
+
   /**
-   * Set <em>final state</em> of the <code>ApplicationMaster</code>
-   * @param finalState <em>final state</em> of the <code>ApplicationMaster</code>
+   * Set the <em>finish state</em> of the <code>ApplicationMaster</code>
+   * @param finishState <em>finish state</em> of the <code>ApplicationMaster</code>
    */
   @Public
   @Stable
-  void setFinalState(String finalState);
+  void setFinishApplicationStatus(FinalApplicationStatus finishState);
 
   /**
    * Get <em>diagnostic information</em> on application failure.
@@ -89,7 +90,7 @@ public interface FinishApplicationMaster
   @Public
   @Stable
   String getDiagnostics();
-  
+
   /**
    * Set <em>diagnostic information</em> on application failure.
    * @param diagnostics <em>diagnostic information</em> on application failure
@@ -105,10 +106,10 @@ public interface FinishApplicationMaster
   @Public
   @Stable
   String getTrackingUrl();
-  
+
   /**
    * Set the <em>tracking URL</em>for the <code>ApplicationMaster</code>
-   * @param url <em>tracking URL</em>for the 
+   * @param url <em>tracking URL</em>for the
    *                   <code>ApplicationMaster</code>
    */
   @Public
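
With the String-valued getFinalState()/setFinalState() replaced by typed accessors, an ApplicationMaster reports completion roughly as in the minimal sketch below; the Records factory, an AMRMProtocol stub named scheduler, and the jobHistoryUrl variable are assumptions, not code from this patch. Note that the accessor pair in this revision is asymmetric: the getter is getFinalApplicationStatus() while the setter is setFinishApplicationStatus().

    FinishApplicationMasterRequest finishRequest =
        Records.newRecord(FinishApplicationMasterRequest.class);
    finishRequest.setAppAttemptId(applicationAttemptId);      // attempt this AM manages
    finishRequest.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);
    finishRequest.setDiagnostics("");                         // diagnostics matter on failure
    finishRequest.setTrackingUrl(jobHistoryUrl);              // assumed history URL variable
    scheduler.finishApplicationMaster(finishRequest);         // assumed AMRMProtocol stub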

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/FinishApplicationMasterRequestPBImpl.java Fri Sep 30 12:54:53 2011
@@ -21,23 +21,24 @@ package org.apache.hadoop.yarn.api.proto
 
 import org.apache.hadoop.yarn.api.protocolrecords.FinishApplicationMasterRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
 import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationAttemptIdPBImpl;
 import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationAttemptIdProto;
+import org.apache.hadoop.yarn.proto.YarnProtos.FinalApplicationStatusProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProtoOrBuilder;
-import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterRequestProtoOrBuilder;
+import org.apache.hadoop.yarn.util.ProtoUtils;
 
 
-    
 public class FinishApplicationMasterRequestPBImpl extends ProtoBase<FinishApplicationMasterRequestProto> implements FinishApplicationMasterRequest {
   FinishApplicationMasterRequestProto proto = FinishApplicationMasterRequestProto.getDefaultInstance();
   FinishApplicationMasterRequestProto.Builder builder = null;
   boolean viaProto = false;
-  
+
   private ApplicationAttemptId appAttemptId = null;
-  
-  
+
+
   public FinishApplicationMasterRequestPBImpl() {
     builder = FinishApplicationMasterRequestProto.newBuilder();
   }
@@ -46,7 +47,7 @@ public class FinishApplicationMasterRequ
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public FinishApplicationMasterRequestProto getProto() {
       mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
@@ -61,7 +62,7 @@ public class FinishApplicationMasterRequ
   }
 
   private void mergeLocalToProto() {
-    if (viaProto) 
+    if (viaProto)
       maybeInitBuilder();
     mergeLocalToBuilder();
     proto = builder.build();
@@ -74,8 +75,7 @@ public class FinishApplicationMasterRequ
     }
     viaProto = false;
   }
-    
-  
+
   @Override
   public ApplicationAttemptId getApplicationAttemptId() {
     FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
@@ -92,7 +92,7 @@ public class FinishApplicationMasterRequ
   @Override
   public void setAppAttemptId(ApplicationAttemptId applicationAttemptId) {
     maybeInitBuilder();
-    if (applicationAttemptId == null) 
+    if (applicationAttemptId == null)
       builder.clearApplicationAttemptId();
     this.appAttemptId = applicationAttemptId;
   }
@@ -122,15 +122,22 @@ public class FinishApplicationMasterRequ
   }
 
   @Override
-  public String getFinalState() {
+  public FinalApplicationStatus getFinalApplicationStatus() {
     FinishApplicationMasterRequestProtoOrBuilder p = viaProto ? proto : builder;
-    return p.getFinalState();
+    if (!p.hasFinalApplicationStatus()) {
+      return null;
+    }
+    return convertFromProtoFormat(p.getFinalApplicationStatus());
   }
 
   @Override
-  public void setFinalState(String state) {
+  public void setFinishApplicationStatus(FinalApplicationStatus finishState) {
     maybeInitBuilder();
-    builder.setFinalState(state);
+    if (finishState == null) {
+      builder.clearFinalApplicationStatus();
+      return;
+    }
+    builder.setFinalApplicationStatus(convertToProtoFormat(finishState));
   }
 
   private ApplicationAttemptIdPBImpl convertFromProtoFormat(ApplicationAttemptIdProto p) {
@@ -141,6 +148,13 @@ public class FinishApplicationMasterRequ
     return ((ApplicationAttemptIdPBImpl)t).getProto();
   }
 
+  private FinalApplicationStatus convertFromProtoFormat(FinalApplicationStatusProto s) {
+    return ProtoUtils.convertFromProtoFormat(s);
+  }
+
+  private FinalApplicationStatusProto convertToProtoFormat(FinalApplicationStatus s) {
+    return ProtoUtils.convertToProtoFormat(s);
+  }
 
 
-}  
+}
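
One behavioral consequence of the typed field, visible in the getter/setter hunk above: the optional proto field now round-trips null, whereas the removed setFinalState(String) passed its argument straight to the builder with no null handling. A small sketch (not code from this commit):

    // An unset finalApplicationStatus reads back as null, and setting null
    // clears the proto field rather than throwing.
    FinishApplicationMasterRequestPBImpl request =
        new FinishApplicationMasterRequestPBImpl();
    assert request.getFinalApplicationStatus() == null;        // nothing set yet
    request.setFinishApplicationStatus(FinalApplicationStatus.SUCCEEDED);
    request.setFinishApplicationStatus(null);                  // clears the field
    assert request.getFinalApplicationStatus() == null;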

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/protocolrecords/impl/pb/RegisterApplicationMasterResponsePBImpl.java Fri Sep 30 12:54:53 2011
@@ -23,24 +23,22 @@ import org.apache.hadoop.yarn.api.protoc
 import org.apache.hadoop.yarn.api.records.ProtoBase;
 import org.apache.hadoop.yarn.api.records.Resource;
 import org.apache.hadoop.yarn.api.records.impl.pb.ResourcePBImpl;
-import org.apache.hadoop.yarn.proto.YarnProtos.QueueInfoProto;
 import org.apache.hadoop.yarn.proto.YarnProtos.ResourceProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProto;
 import org.apache.hadoop.yarn.proto.YarnServiceProtos.RegisterApplicationMasterResponseProtoOrBuilder;
 
 
-    
-public class RegisterApplicationMasterResponsePBImpl 
-extends ProtoBase<RegisterApplicationMasterResponseProto> 
+public class RegisterApplicationMasterResponsePBImpl
+extends ProtoBase<RegisterApplicationMasterResponseProto>
 implements RegisterApplicationMasterResponse {
-  RegisterApplicationMasterResponseProto proto = 
+  RegisterApplicationMasterResponseProto proto =
     RegisterApplicationMasterResponseProto.getDefaultInstance();
   RegisterApplicationMasterResponseProto.Builder builder = null;
   boolean viaProto = false;
-  
+
   private Resource minimumResourceCapability;
   private Resource maximumResourceCapability;
-  
+
   public RegisterApplicationMasterResponsePBImpl() {
     builder = RegisterApplicationMasterResponseProto.newBuilder();
   }
@@ -49,16 +47,16 @@ implements RegisterApplicationMasterResp
     this.proto = proto;
     viaProto = true;
   }
-  
+
   public RegisterApplicationMasterResponseProto getProto() {
     mergeLocalToProto();
     proto = viaProto ? proto : builder.build();
     viaProto = true;
     return proto;
   }
-  
+
   private void mergeLocalToProto() {
-    if (viaProto) 
+    if (viaProto)
       maybeInitBuilder();
     mergeLocalToBuilder();
     proto = builder.build();
@@ -94,7 +92,7 @@ implements RegisterApplicationMasterResp
     if (!p.hasMaximumCapability()) {
       return null;
     }
-    
+
     this.maximumResourceCapability = convertFromProtoFormat(p.getMaximumCapability());
     return this.maximumResourceCapability;
   }
@@ -109,7 +107,7 @@ implements RegisterApplicationMasterResp
     if (!p.hasMinimumCapability()) {
       return null;
     }
-    
+
     this.minimumResourceCapability = convertFromProtoFormat(p.getMinimumCapability());
     return this.minimumResourceCapability;
   }
@@ -140,4 +138,4 @@ implements RegisterApplicationMasterResp
     return ((ResourcePBImpl)resource).getProto();
   }
 
-}  
+}

Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java?rev=1177639&r1=1177638&r2=1177639&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationMaster.java Fri Sep 30 12:54:53 2011
@@ -22,38 +22,38 @@ import org.apache.hadoop.classification.
 import org.apache.hadoop.classification.InterfaceStability.Unstable;
 
 /**
- * <em>For internal use only...</em> 
+ * <em>For internal use only...</em>
  */
 @Private
 @Unstable
 public interface ApplicationMaster {
   ApplicationId getApplicationId();
   void setApplicationId(ApplicationId appId);
-  
+
   String getHost();
   void setHost(String host);
-  
+
   int getRpcPort();
   void setRpcPort(int rpcPort);
-  
+
   String getTrackingUrl();
   void setTrackingUrl(String url);
-  
+
   ApplicationStatus getStatus();
   void setStatus(ApplicationStatus status);
-  
-  ApplicationState getState();
-  void setState(ApplicationState state);
-  
+
+  YarnApplicationState getState();
+  void setState(YarnApplicationState state);
+
   String getClientToken();
   void setClientToken(String clientToken);
-  
+
   int getAMFailCount();
   void setAMFailCount(int amFailCount);
-  
+
   int getContainerCount();
   void setContainerCount(int containerCount);
-  
+
   String getDiagnostics();
   void setDiagnostics(String diagnostics);
 }