You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/10/19 07:28:23 UTC
svn commit: r1185977 [1/2] - in
/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/
hadoop-mapreduce-client/hadoop-mapreduce-client-a...
Author: vinodkv
Date: Wed Oct 19 05:28:22 2011
New Revision: 1185977
URL: http://svn.apache.org/viewvc?rev=1185977&view=rev
Log:
MAPREDUCE-3144. Augmented JobHistory with the information needed for serving aggregated logs. Contributed by Siddharth Seth.
svn merge -c r1185976 --ignore-ancestry ../../trunk/
Added:
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java
- copied unchanged from r1185976, hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/AMStartedEvent.java
Modified:
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/Recovery.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/ApplicationMaster.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/util/ConverterUtils.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerLogsPage.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ClientRMService.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManager.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/RMAppManagerSubmitEvent.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/amlauncher/AMLauncher.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMApp.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/RMAppImpl.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockNM.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestAppManager.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/TestApplicationMasterLauncher.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/MockRMApp.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/rmapp/TestRMAppTransitions.java
hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-site/src/site/apt/WritingYarnApplications.apt.vm
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/CHANGES.txt Wed Oct 19 05:28:22 2011
@@ -359,6 +359,9 @@ Release 0.23.0 - Unreleased
from the NodeManager and set MALLOC_ARENA_MAX for all daemons and
containers. (Chris Riccomini via acmurthy)
+ MAPREDUCE-3144. Augmented JobHistory with the information needed for
+ serving aggregated logs. (Siddharth Seth via vinodkv)
+
OPTIMIZATIONS
MAPREDUCE-2026. Make JobTracker.getJobCounters() and
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryEventHandler.java Wed Oct 19 05:28:22 2011
@@ -275,7 +275,7 @@ public class JobHistoryEventHandler exte
* @param jobId the jobId.
* @throws IOException
*/
- protected void setupEventWriter(JobId jobId, JobSubmittedEvent jse)
+ protected void setupEventWriter(JobId jobId)
throws IOException {
if (stagingDirPath == null) {
LOG.error("Log Directory is null, returning");
@@ -285,9 +285,6 @@ public class JobHistoryEventHandler exte
MetaInfo oldFi = fileMap.get(jobId);
Configuration conf = getConfig();
- long submitTime = oldFi == null ? jse.getSubmitTime() : oldFi
- .getJobIndexInfo().getSubmitTime();
-
// TODO Ideally this should be written out to the job dir
// (.staging/jobid/files - RecoveryService will need to be patched)
Path historyFile = JobHistoryUtils.getStagingJobHistoryFile(
@@ -301,6 +298,8 @@ public class JobHistoryEventHandler exte
String jobName = context.getJob(jobId).getName();
EventWriter writer = (oldFi == null) ? null : oldFi.writer;
+ Path logDirConfPath =
+ JobHistoryUtils.getStagingConfFile(stagingDirPath, jobId, startCount);
if (writer == null) {
try {
FSDataOutputStream out = stagingDirFS.create(historyFile, true);
@@ -312,31 +311,28 @@ public class JobHistoryEventHandler exte
+ "[" + jobName + "]");
throw ioe;
}
- }
-
- Path logDirConfPath = null;
- if (conf != null) {
- // TODO Ideally this should be written out to the job dir
- // (.staging/jobid/files - RecoveryService will need to be patched)
- logDirConfPath = JobHistoryUtils.getStagingConfFile(stagingDirPath, jobId,
- startCount);
- FSDataOutputStream jobFileOut = null;
- try {
- if (logDirConfPath != null) {
- jobFileOut = stagingDirFS.create(logDirConfPath, true);
- conf.writeXml(jobFileOut);
- jobFileOut.close();
+
+ //Write out conf only if the writer isn't already setup.
+ if (conf != null) {
+ // TODO Ideally this should be written out to the job dir
+ // (.staging/jobid/files - RecoveryService will need to be patched)
+ FSDataOutputStream jobFileOut = null;
+ try {
+ if (logDirConfPath != null) {
+ jobFileOut = stagingDirFS.create(logDirConfPath, true);
+ conf.writeXml(jobFileOut);
+ jobFileOut.close();
+ }
+ } catch (IOException e) {
+ LOG.info("Failed to write the job configuration file", e);
+ throw e;
}
- } catch (IOException e) {
- LOG.info("Failed to write the job configuration file", e);
- throw e;
}
}
-
- MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer, submitTime,
+
+ MetaInfo fi = new MetaInfo(historyFile, logDirConfPath, writer,
user, jobName, jobId);
fi.getJobSummary().setJobId(jobId);
- fi.getJobSummary().setJobSubmitTime(submitTime);
fileMap.put(jobId, fi);
}
@@ -368,11 +364,9 @@ public class JobHistoryEventHandler exte
synchronized (lock) {
// If this is JobSubmitted Event, setup the writer
- if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
+ if (event.getHistoryEvent().getEventType() == EventType.AM_STARTED) {
try {
- JobSubmittedEvent jobSubmittedEvent =
- (JobSubmittedEvent) event.getHistoryEvent();
- setupEventWriter(event.getJobID(), jobSubmittedEvent);
+ setupEventWriter(event.getJobID());
} catch (IOException ioe) {
LOG.error("Error JobHistoryEventHandler in handleEvent: " + event,
ioe);
@@ -396,6 +390,12 @@ public class JobHistoryEventHandler exte
throw new YarnException(e);
}
+ if (event.getHistoryEvent().getEventType() == EventType.JOB_SUBMITTED) {
+ JobSubmittedEvent jobSubmittedEvent =
+ (JobSubmittedEvent) event.getHistoryEvent();
+ mi.getJobIndexInfo().setSubmitTime(jobSubmittedEvent.getSubmitTime());
+ }
+
// If this is JobFinishedEvent, close the writer and setup the job-index
if (event.getHistoryEvent().getEventType() == EventType.JOB_FINISHED) {
try {
@@ -436,6 +436,7 @@ public class JobHistoryEventHandler exte
JobSubmittedEvent jse = (JobSubmittedEvent) event;
summary.setUser(jse.getUserName());
summary.setQueue(jse.getJobQueueName());
+ summary.setJobSubmitTime(jse.getSubmitTime());
break;
case JOB_INITED:
JobInitedEvent jie = (JobInitedEvent) event;
@@ -588,12 +589,12 @@ public class JobHistoryEventHandler exte
JobIndexInfo jobIndexInfo;
JobSummary jobSummary;
- MetaInfo(Path historyFile, Path conf, EventWriter writer, long submitTime,
+ MetaInfo(Path historyFile, Path conf, EventWriter writer,
String user, String jobName, JobId jobId) {
this.historyFile = historyFile;
this.confFile = conf;
this.writer = writer;
- this.jobIndexInfo = new JobIndexInfo(submitTime, -1, user, jobName, jobId, -1, -1, null);
+ this.jobIndexInfo = new JobIndexInfo(-1, -1, user, jobName, jobId, -1, -1, null);
this.jobSummary = new JobSummary();
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/MRAppMaster.java Wed Oct 19 05:28:22 2011
@@ -22,7 +22,10 @@ import java.io.File;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
+import java.net.InetSocketAddress;
import java.security.PrivilegedExceptionAction;
+import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
@@ -40,6 +43,8 @@ import org.apache.hadoop.mapred.TaskUmbi
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
+import org.apache.hadoop.mapreduce.jobhistory.AMStartedEvent;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -72,6 +77,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanerImpl;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
+import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
@@ -82,6 +88,7 @@ import org.apache.hadoop.yarn.YarnExcept
import org.apache.hadoop.yarn.api.ApplicationConstants;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.AsyncDispatcher;
import org.apache.hadoop.yarn.event.Dispatcher;
@@ -115,14 +122,20 @@ public class MRAppMaster extends Composi
private static final Log LOG = LogFactory.getLog(MRAppMaster.class);
private Clock clock;
- private final long startTime = System.currentTimeMillis();
+ private final long startTime;
+ private final long appSubmitTime;
private String appName;
private final ApplicationAttemptId appAttemptID;
+ private final ContainerId containerID;
+ private final String nmHost;
+ private final int nmHttpPort;
protected final MRAppMetrics metrics;
private Set<TaskId> completedTasksFromPreviousRun;
+ private List<AMInfo> amInfos;
private AppContext context;
private Dispatcher dispatcher;
private ClientService clientService;
+ private Recovery recoveryServ;
private ContainerAllocator containerAllocator;
private ContainerLauncher containerLauncher;
private TaskCleaner taskCleaner;
@@ -131,19 +144,29 @@ public class MRAppMaster extends Composi
private JobTokenSecretManager jobTokenSecretManager =
new JobTokenSecretManager();
private JobEventDispatcher jobEventDispatcher;
+ private boolean inRecovery = false;
private Job job;
private Credentials fsTokens = new Credentials(); // Filled during init
private UserGroupInformation currentUser; // Will be setup during init
- public MRAppMaster(ApplicationAttemptId applicationAttemptId) {
- this(applicationAttemptId, new SystemClock());
+ public MRAppMaster(ApplicationAttemptId applicationAttemptId,
+ ContainerId containerId, String nmHost, int nmHttpPort, long appSubmitTime) {
+ this(applicationAttemptId, containerId, nmHost, nmHttpPort,
+ new SystemClock(), appSubmitTime);
}
- public MRAppMaster(ApplicationAttemptId applicationAttemptId, Clock clock) {
+ public MRAppMaster(ApplicationAttemptId applicationAttemptId,
+ ContainerId containerId, String nmHost, int nmHttpPort, Clock clock,
+ long appSubmitTime) {
super(MRAppMaster.class.getName());
this.clock = clock;
+ this.startTime = clock.getTime();
+ this.appSubmitTime = appSubmitTime;
this.appAttemptID = applicationAttemptId;
+ this.containerID = containerId;
+ this.nmHost = nmHost;
+ this.nmHttpPort = nmHttpPort;
this.metrics = MRAppMetrics.create();
LOG.info("Created MRAppMaster for application " + applicationAttemptId);
}
@@ -162,11 +185,11 @@ public class MRAppMaster extends Composi
if (conf.getBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, false)
&& appAttemptID.getAttemptId() > 1) {
LOG.info("Recovery is enabled. Will try to recover from previous life.");
- Recovery recoveryServ = new RecoveryService(appAttemptID, clock);
+ recoveryServ = new RecoveryService(appAttemptID, clock);
addIfService(recoveryServ);
dispatcher = recoveryServ.getDispatcher();
clock = recoveryServ.getClock();
- completedTasksFromPreviousRun = recoveryServ.getCompletedTasks();
+ inRecovery = true;
} else {
dispatcher = new AsyncDispatcher();
addIfService(dispatcher);
@@ -327,7 +350,8 @@ public class MRAppMaster extends Composi
// create single job
Job newJob = new JobImpl(appAttemptID, conf, dispatcher.getEventHandler(),
taskAttemptListener, jobTokenSecretManager, fsTokens, clock,
- completedTasksFromPreviousRun, metrics, currentUser.getUserName());
+ completedTasksFromPreviousRun, metrics, currentUser.getUserName(),
+ appSubmitTime, amInfos);
((RunningAppContext) context).jobs.put(newJob.getID(), newJob);
dispatcher.register(JobFinishEvent.Type.class,
@@ -463,6 +487,10 @@ public class MRAppMaster extends Composi
return completedTasksFromPreviousRun;
}
+ public List<AMInfo> getAllAMInfos() {
+ return amInfos;
+ }
+
public ContainerAllocator getContainerAllocator() {
return containerAllocator;
}
@@ -617,11 +645,33 @@ public class MRAppMaster extends Composi
@Override
public void start() {
- ///////////////////// Create the job itself.
+ // Pull completedTasks etc from recovery
+ if (inRecovery) {
+ completedTasksFromPreviousRun = recoveryServ.getCompletedTasks();
+ amInfos = recoveryServ.getAMInfos();
+ }
+
+ // / Create the AMInfo for the current AppMaster
+ if (amInfos == null) {
+ amInfos = new LinkedList<AMInfo>();
+ }
+ AMInfo amInfo =
+ new AMInfo(appAttemptID, startTime, containerID, nmHost, nmHttpPort);
+ amInfos.add(amInfo);
+
+ // /////////////////// Create the job itself.
job = createJob(getConfig());
-
+
// End of creating the job.
+ // Send out an MR AM inited event for this AM and all previous AMs.
+ for (AMInfo info : amInfos) {
+ dispatcher.getEventHandler().handle(
+ new JobHistoryEvent(job.getID(), new AMStartedEvent(info
+ .getAppAttemptId(), info.getStartTime(), info.getContainerId(),
+ info.getNodeManagerHost(), info.getNodeManagerHttpPort())));
+ }
+
// metrics system init is really init & start.
// It's more test friendly to put it here.
DefaultMetricsSystem.initialize("MRAppMaster");
@@ -723,17 +773,39 @@ public class MRAppMaster extends Composi
public static void main(String[] args) {
try {
- String applicationAttemptIdStr = System
- .getenv(ApplicationConstants.APPLICATION_ATTEMPT_ID_ENV);
- if (applicationAttemptIdStr == null) {
- String msg = ApplicationConstants.APPLICATION_ATTEMPT_ID_ENV
- + " is null";
+ String containerIdStr =
+ System.getenv(ApplicationConstants.AM_CONTAINER_ID_ENV);
+ String nodeHttpAddressStr =
+ System.getenv(ApplicationConstants.NM_HTTP_ADDRESS_ENV);
+ String appSubmitTimeStr =
+ System.getenv(ApplicationConstants.APP_SUBMIT_TIME_ENV);
+ if (containerIdStr == null) {
+ String msg = ApplicationConstants.AM_CONTAINER_ID_ENV + " is null";
+ LOG.error(msg);
+ throw new IOException(msg);
+ }
+ if (nodeHttpAddressStr == null) {
+ String msg = ApplicationConstants.NM_HTTP_ADDRESS_ENV + " is null";
LOG.error(msg);
throw new IOException(msg);
}
- ApplicationAttemptId applicationAttemptId = ConverterUtils
- .toApplicationAttemptId(applicationAttemptIdStr);
- MRAppMaster appMaster = new MRAppMaster(applicationAttemptId);
+ if (appSubmitTimeStr == null) {
+ String msg = ApplicationConstants.APP_SUBMIT_TIME_ENV + " is null";
+ LOG.error(msg);
+ throw new IOException(msg);
+ }
+
+ ContainerId containerId = ConverterUtils.toContainerId(containerIdStr);
+ ApplicationAttemptId applicationAttemptId =
+ containerId.getApplicationAttemptId();
+ InetSocketAddress nodeHttpInetAddr =
+ NetUtils.createSocketAddr(nodeHttpAddressStr);
+ long appSubmitTime = Long.parseLong(appSubmitTimeStr);
+
+ MRAppMaster appMaster =
+ new MRAppMaster(applicationAttemptId, containerId,
+ nodeHttpInetAddr.getHostName(), nodeHttpInetAddr.getPort(),
+ appSubmitTime);
Runtime.getRuntime().addShutdownHook(
new CompositeServiceShutdownHook(appMaster));
YarnConfiguration conf = new YarnConfiguration(new JobConf());
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/Job.java Wed Oct 19 05:28:22 2011
@@ -23,6 +23,7 @@ import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -68,5 +69,10 @@ public interface Job {
TaskAttemptCompletionEvent[]
getTaskAttemptCompletionEvents(int fromEventId, int maxEvents);
+ /**
+ * @return information for MR AppMasters (previously failed and current)
+ */
+ List<AMInfo> getAMInfos();
+
boolean checkAccess(UserGroupInformation callerUGI, JobACL jobOperation);
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java Wed Oct 19 05:28:22 2011
@@ -51,6 +51,7 @@ import org.apache.hadoop.mapreduce.TaskA
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobFinishedEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobInfoChangeEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobInitedEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobSubmittedEvent;
@@ -136,6 +137,7 @@ public class JobImpl implements org.apac
private final String username;
private final Map<JobACL, AccessControlList> jobACLs;
private final Set<TaskId> completedTasksFromPreviousRun;
+ private final List<AMInfo> amInfos;
private final Lock readLock;
private final Lock writeLock;
private final JobId jobId;
@@ -148,6 +150,7 @@ public class JobImpl implements org.apac
private final EventHandler eventHandler;
private final MRAppMetrics metrics;
private final String userName;
+ private final long appSubmitTime;
private boolean lazyTasksCopyNeeded = false;
private volatile Map<TaskId, Task> tasks = new LinkedHashMap<TaskId, Task>();
@@ -354,7 +357,6 @@ public class JobImpl implements org.apac
private int failedReduceTaskCount = 0;
private int killedMapTaskCount = 0;
private int killedReduceTaskCount = 0;
- private long submitTime;
private long startTime;
private long finishTime;
private float setupProgress;
@@ -370,7 +372,7 @@ public class JobImpl implements org.apac
JobTokenSecretManager jobTokenSecretManager,
Credentials fsTokenCredentials, Clock clock,
Set<TaskId> completedTasksFromPreviousRun, MRAppMetrics metrics,
- String userName) {
+ String userName, long appSubmitTime, List<AMInfo> amInfos) {
this.applicationAttemptId = applicationAttemptId;
this.jobId = recordFactory.newRecordInstance(JobId.class);
this.jobName = conf.get(JobContext.JOB_NAME, "<missing job name>");
@@ -378,7 +380,9 @@ public class JobImpl implements org.apac
this.metrics = metrics;
this.clock = clock;
this.completedTasksFromPreviousRun = completedTasksFromPreviousRun;
+ this.amInfos = amInfos;
this.userName = userName;
+ this.appSubmitTime = appSubmitTime;
ApplicationId applicationId = applicationAttemptId.getApplicationId();
jobId.setAppId(applicationId);
jobId.setId(applicationId.getId());
@@ -806,6 +810,11 @@ public class JobImpl implements org.apac
public Map<JobACL, AccessControlList> getJobACLs() {
return Collections.unmodifiableMap(jobACLs);
}
+
+ @Override
+ public List<AMInfo> getAMInfos() {
+ return amInfos;
+ }
public static class InitTransition
implements MultipleArcTransition<JobImpl, JobEvent, JobState> {
@@ -819,7 +828,6 @@ public class JobImpl implements org.apac
*/
@Override
public JobState transition(JobImpl job, JobEvent event) {
- job.submitTime = job.clock.getTime();
job.metrics.submittedJob(job);
job.metrics.preparingJob(job);
try {
@@ -830,7 +838,7 @@ public class JobImpl implements org.apac
JobSubmittedEvent jse = new JobSubmittedEvent(job.oldJobId,
job.conf.get(MRJobConfig.JOB_NAME, "test"),
job.conf.get(MRJobConfig.USER_NAME, "mapred"),
- job.submitTime,
+ job.appSubmitTime,
job.remoteJobConfFile.toString(),
job.jobACLs, job.conf.get(MRJobConfig.QUEUE_NAME, "default"));
job.eventHandler.handle(new JobHistoryEvent(job.jobId, jse));
@@ -1152,7 +1160,7 @@ public class JobImpl implements org.apac
job.isUber()); //Will transition to state running. Currently in INITED
job.eventHandler.handle(new JobHistoryEvent(job.jobId, jie));
JobInfoChangeEvent jice = new JobInfoChangeEvent(job.oldJobId,
- job.submitTime, job.startTime);
+ job.appSubmitTime, job.startTime);
job.eventHandler.handle(new JobHistoryEvent(job.jobId, jice));
job.metrics.runningJob(job);
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Wed Oct 19 05:28:22 2011
@@ -894,15 +894,20 @@ public abstract class TaskAttemptImpl im
return jce;
}
- private static TaskAttemptUnsuccessfulCompletionEvent createTaskAttemptUnsuccessfulCompletionEvent(
- TaskAttemptImpl taskAttempt, TaskAttemptState attemptState) {
- TaskAttemptUnsuccessfulCompletionEvent tauce = new TaskAttemptUnsuccessfulCompletionEvent(
- TypeConverter.fromYarn(taskAttempt.attemptId),
- TypeConverter.fromYarn(taskAttempt.attemptId.getTaskId().getTaskType()),
- attemptState.toString(), taskAttempt.finishTime,
- taskAttempt.nodeHostName == null ? "UNKNOWN" : taskAttempt.nodeHostName,
- StringUtils.join(LINE_SEPARATOR, taskAttempt.getDiagnostics()),
- taskAttempt.getProgressSplitBlock().burst());
+ private static
+ TaskAttemptUnsuccessfulCompletionEvent
+ createTaskAttemptUnsuccessfulCompletionEvent(TaskAttemptImpl taskAttempt,
+ TaskAttemptState attemptState) {
+ TaskAttemptUnsuccessfulCompletionEvent tauce =
+ new TaskAttemptUnsuccessfulCompletionEvent(
+ TypeConverter.fromYarn(taskAttempt.attemptId),
+ TypeConverter.fromYarn(taskAttempt.attemptId.getTaskId()
+ .getTaskType()), attemptState.toString(),
+ taskAttempt.finishTime,
+ taskAttempt.containerMgrAddress == null ? "UNKNOWN"
+ : taskAttempt.containerMgrAddress, StringUtils.join(
+ LINE_SEPARATOR, taskAttempt.getDiagnostics()), taskAttempt
+ .getProgressSplitBlock().burst());
return tauce;
}
@@ -1120,11 +1125,15 @@ public abstract class TaskAttemptImpl im
, 1);
taskAttempt.eventHandler.handle(jce);
+ LOG.info("TaskAttempt: [" + taskAttempt.attemptId
+ + "] using containerId: [" + taskAttempt.containerID + "] on NM: ["
+ + taskAttempt.containerMgrAddress + "]");
TaskAttemptStartedEvent tase =
new TaskAttemptStartedEvent(TypeConverter.fromYarn(taskAttempt.attemptId),
TypeConverter.fromYarn(taskAttempt.attemptId.getTaskId().getTaskType()),
taskAttempt.launchTime,
- nodeHttpInetAddr.getHostName(), nodeHttpInetAddr.getPort(), taskAttempt.shufflePort);
+ nodeHttpInetAddr.getHostName(), nodeHttpInetAddr.getPort(),
+ taskAttempt.shufflePort, taskAttempt.containerID);
taskAttempt.eventHandler.handle
(new JobHistoryEvent(taskAttempt.attemptId.getTaskId().getJobId(), tase));
taskAttempt.eventHandler.handle
@@ -1236,7 +1245,8 @@ public abstract class TaskAttemptImpl im
TypeConverter.fromYarn(attemptId.getTaskId().getTaskType()),
state.toString(),
this.reportedStatus.mapFinishTime,
- finishTime, this.nodeHostName == null ? "UNKNOWN" : this.nodeHostName,
+ finishTime, this.containerMgrAddress == null ? "UNKNOWN"
+ : this.containerMgrAddress,
this.reportedStatus.stateString,
TypeConverter.fromYarn(getCounters()),
getProgressSplitBlock().burst());
@@ -1249,7 +1259,8 @@ public abstract class TaskAttemptImpl im
state.toString(),
this.reportedStatus.shuffleFinishTime,
this.reportedStatus.sortFinishTime,
- finishTime, this.nodeHostName == null ? "UNKNOWN" : this.nodeHostName,
+ finishTime, this.containerMgrAddress == null ? "UNKNOWN"
+ : this.containerMgrAddress,
this.reportedStatus.stateString,
TypeConverter.fromYarn(getCounters()),
getProgressSplitBlock().burst());
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/Recovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/Recovery.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/Recovery.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/Recovery.java Wed Oct 19 05:28:22 2011
@@ -18,8 +18,10 @@
package org.apache.hadoop.mapreduce.v2.app.recover;
+import java.util.List;
import java.util.Set;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.event.Dispatcher;
@@ -31,4 +33,6 @@ public interface Recovery {
Clock getClock();
Set<TaskId> getCompletedTasks();
+
+ List<AMInfo> getAMInfos();
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/recover/RecoveryService.java Wed Oct 19 05:28:22 2011
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.a
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
+import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -34,6 +35,7 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Phase;
@@ -148,6 +150,14 @@ public class RecoveryService extends Com
public Set<TaskId> getCompletedTasks() {
return completedTasks.keySet();
}
+
+ @Override
+ public List<AMInfo> getAMInfos() {
+ if (jobInfo == null || jobInfo.getAMInfos() == null) {
+ return new LinkedList<AMInfo>();
+ }
+ return new LinkedList<AMInfo>(jobInfo.getAMInfos());
+ }
private void parse() throws IOException {
// TODO: parse history file based on startCount
@@ -351,15 +361,16 @@ public class RecoveryService extends Com
private void sendAssignedEvent(TaskAttemptId yarnAttemptID,
TaskAttemptInfo attemptInfo) {
LOG.info("Sending assigned event to " + yarnAttemptID);
- ContainerId cId = recordFactory
- .newRecordInstance(ContainerId.class);
+ ContainerId cId = attemptInfo.getContainerId();
Container container = recordFactory
.newRecordInstance(Container.class);
container.setId(cId);
container.setNodeId(recordFactory
.newRecordInstance(NodeId.class));
+ // NodeId can be obtained from TaskAttemptInfo.hostname - but this will
+ // eventually contain rack info.
container.setContainerToken(null);
- container.setNodeHttpAddress(attemptInfo.getHostname() + ":" +
+ container.setNodeHttpAddress(attemptInfo.getTrackerName() + ":" +
attemptInfo.getHttpPort());
actualHandler.handle(new TaskAttemptContainerAssignedEvent(yarnAttemptID,
container));
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Wed Oct 19 05:28:22 2011
@@ -77,6 +77,7 @@ import org.apache.hadoop.yarn.factory.pr
import org.apache.hadoop.yarn.service.Service;
import org.apache.hadoop.yarn.state.StateMachine;
import org.apache.hadoop.yarn.state.StateMachineFactory;
+import org.apache.hadoop.yarn.util.BuilderUtils;
/**
@@ -118,10 +119,20 @@ public class MRApp extends MRAppMaster {
applicationAttemptId.setAttemptId(startCount);
return applicationAttemptId;
}
+
+ private static ContainerId getContainerId(ApplicationId applicationId,
+ int startCount) {
+ ApplicationAttemptId appAttemptId =
+ getApplicationAttemptId(applicationId, startCount);
+ ContainerId containerId =
+ BuilderUtils.newContainerId(appAttemptId, startCount);
+ return containerId;
+ }
public MRApp(int maps, int reduces, boolean autoComplete, String testName,
boolean cleanOnStart, int startCount) {
- super(getApplicationAttemptId(applicationId, startCount));
+ super(getApplicationAttemptId(applicationId, startCount), getContainerId(
+ applicationId, startCount), "testhost", 3333, System.currentTimeMillis());
this.testWorkDir = new File("target", testName);
testAbsPath = new Path(testWorkDir.getAbsolutePath());
LOG.info("PathUsed: " + testAbsPath);
@@ -405,10 +416,10 @@ public class MRApp extends MRAppMaster {
public TestJob(Configuration conf, ApplicationId applicationId,
EventHandler eventHandler, TaskAttemptListener taskAttemptListener,
Clock clock, String user) {
- super(getApplicationAttemptId(applicationId, getStartCount()),
- conf, eventHandler, taskAttemptListener,
- new JobTokenSecretManager(), new Credentials(), clock,
- getCompletedTaskFromPreviousRun(), metrics, user);
+ super(getApplicationAttemptId(applicationId, getStartCount()), conf,
+ eventHandler, taskAttemptListener, new JobTokenSecretManager(),
+ new Credentials(), clock, getCompletedTaskFromPreviousRun(), metrics,
+ user, System.currentTimeMillis(), getAllAMInfos());
// This "this leak" is okay because the retained pointer is in an
// instance variable.
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java Wed Oct 19 05:28:22 2011
@@ -33,6 +33,7 @@ import org.apache.hadoop.mapreduce.FileS
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.TaskCounter;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -488,6 +489,11 @@ public class MockJobs extends MockApps {
public Map<JobACL, AccessControlList> getJobACLs() {
return Collections.<JobACL, AccessControlList>emptyMap();
}
+
+ @Override
+ public List<AMInfo> getAMInfos() {
+ throw new UnsupportedOperationException("Not supported yet.");
+ }
};
}
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestMRAppMaster.java Wed Oct 19 05:28:22 2011
@@ -27,6 +27,7 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.util.ConverterUtils;
import org.junit.Test;
@@ -36,11 +37,15 @@ public class TestMRAppMaster {
public void testMRAppMasterForDifferentUser() throws IOException,
InterruptedException {
String applicationAttemptIdStr = "appattempt_1317529182569_0004_000001";
+ String containerIdStr = "container_1317529182569_0004_000001_1";
String stagingDir = "/tmp/staging";
String userName = "TestAppMasterUser";
ApplicationAttemptId applicationAttemptId = ConverterUtils
.toApplicationAttemptId(applicationAttemptIdStr);
- MRAppMasterTest appMaster = new MRAppMasterTest(applicationAttemptId);
+ ContainerId containerId = ConverterUtils.toContainerId(containerIdStr);
+ MRAppMasterTest appMaster =
+ new MRAppMasterTest(applicationAttemptId, containerId, "host", -1,
+ System.currentTimeMillis());
YarnConfiguration conf = new YarnConfiguration();
conf.set(MRJobConfig.MR_AM_STAGING_DIR, stagingDir);
MRAppMaster.initAndStartAppMaster(appMaster, conf, userName);
@@ -54,8 +59,9 @@ class MRAppMasterTest extends MRAppMaste
Path stagingDirPath;
private Configuration conf;
- public MRAppMasterTest(ApplicationAttemptId applicationAttemptId) {
- super(applicationAttemptId);
+ public MRAppMasterTest(ApplicationAttemptId applicationAttemptId,
+ ContainerId containerId, String host, int port, long submitTime) {
+ super(applicationAttemptId, containerId, host, port, submitTime);
}
@Override
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Wed Oct 19 05:28:22 2011
@@ -340,8 +340,8 @@ public class TestRMContainerAllocator {
public FakeJob(ApplicationAttemptId appAttemptID, Configuration conf,
int numMaps, int numReduces) {
- super(appAttemptID, conf, null, null, null, null, null, null, null,
- null);
+ super(appAttemptID, conf, null, null, null, null, null, null, null, null,
+ System.currentTimeMillis(), null);
this.jobId = MRBuilderUtils
.newJobId(appAttemptID.getApplicationId(), 0);
this.numMaps = numMaps;
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java Wed Oct 19 05:28:22 2011
@@ -28,6 +28,7 @@ import org.apache.hadoop.conf.Configurat
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEventHandler;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskState;
@@ -46,6 +47,7 @@ public class TestRecovery {
@Test
public void testCrashed() throws Exception {
int runCount = 0;
+ long am1StartTimeEst = System.currentTimeMillis();
MRApp app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), true, ++runCount);
Configuration conf = new Configuration();
conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
@@ -126,9 +128,10 @@ public class TestRecovery {
//stop the app
app.stop();
-
+
//rerun
//in rerun the 1st map will be recovered from previous run
+ long am2StartTimeEst = System.currentTimeMillis();
app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false, ++runCount);
conf = new Configuration();
conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
@@ -178,8 +181,27 @@ public class TestRecovery {
task1StartTime, mapTask1.getReport().getStartTime());
Assert.assertEquals("Task Finish time not correct",
task1FinishTime, mapTask1.getReport().getFinishTime());
+ Assert.assertEquals(2, job.getAMInfos().size());
+ int attemptNum = 1;
+ // Verify AMInfo
+ for (AMInfo amInfo : job.getAMInfos()) {
+ Assert.assertEquals(attemptNum++, amInfo.getAppAttemptId()
+ .getAttemptId());
+ Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId()
+ .getApplicationAttemptId());
+ Assert.assertEquals("testhost", amInfo.getNodeManagerHost());
+ Assert.assertEquals(3333, amInfo.getNodeManagerHttpPort());
+ }
+ long am1StartTimeReal = job.getAMInfos().get(0).getStartTime();
+ long am2StartTimeReal = job.getAMInfos().get(1).getStartTime();
+ Assert.assertTrue(am1StartTimeReal >= am1StartTimeEst
+ && am1StartTimeReal <= am2StartTimeEst);
+ Assert.assertTrue(am2StartTimeReal >= am2StartTimeEst
+ && am2StartTimeReal <= System.currentTimeMillis());
+ // TODO Add verification of additional data from jobHistory - whatever was
+ // available in the failed attempt should be available here
}
-
+
class MRAppWithHistory extends MRApp {
public MRAppWithHistory(int maps, int reduces, boolean autoComplete,
String testName, boolean cleanOnStart, int startCount) {
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRuntimeEstimators.java Wed Oct 19 05:28:22 2011
@@ -33,6 +33,7 @@ import java.util.concurrent.atomic.Atomi
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -473,6 +474,11 @@ public class TestRuntimeEstimators {
public Map<JobACL, AccessControlList> getJobACLs() {
throw new UnsupportedOperationException("Not supported yet.");
}
+
+ @Override
+ public List<AMInfo> getAMInfos() {
+ throw new UnsupportedOperationException("Not supported yet.");
+ }
}
/*
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestStagingCleanup.java Wed Oct 19 05:28:22 2011
@@ -41,6 +41,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test;
@@ -81,7 +82,9 @@ import org.junit.Test;
private class TestMRApp extends MRAppMaster {
public TestMRApp(ApplicationAttemptId applicationAttemptId) {
- super(applicationAttemptId);
+ super(applicationAttemptId, BuilderUtils.newContainerId(
+ applicationAttemptId, 1), "testhost", 3333, System
+ .currentTimeMillis());
}
@Override
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/avro/Events.avpr Wed Oct 19 05:28:22 2011
@@ -69,6 +69,16 @@
]
},
+ {"type": "record", "name": "AMStarted",
+ "fields": [
+ {"name": "applicationAttemptId", "type": "string"},
+ {"name": "startTime", "type": "long"},
+ {"name": "containerId", "type": "string"},
+ {"name": "nodeManagerHost", "type": "string"},
+ {"name": "nodeManagerHttpPort", "type": "int"}
+ ]
+ },
+
{"type": "record", "name": "JobSubmitted",
"fields": [
{"name": "jobid", "type": "string"},
@@ -174,7 +184,8 @@
{"name": "startTime", "type": "long"},
{"name": "trackerName", "type": "string"},
{"name": "httpPort", "type": "int"},
- {"name": "shufflePort", "type": "int"}
+ {"name": "shufflePort", "type": "int"},
+ {"name": "containerId", "type": "string"}
]
},
@@ -260,7 +271,8 @@
"CLEANUP_ATTEMPT_STARTED",
"CLEANUP_ATTEMPT_FINISHED",
"CLEANUP_ATTEMPT_FAILED",
- "CLEANUP_ATTEMPT_KILLED"
+ "CLEANUP_ATTEMPT_KILLED",
+ "AM_STARTED"
]
},
@@ -272,6 +284,7 @@
"JobFinished",
"JobInfoChange",
"JobInited",
+ "AMStarted",
"JobPriorityChange",
"JobStatusChanged",
"JobSubmitted",
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/EventReader.java Wed Oct 19 05:28:22 2011
@@ -33,7 +33,6 @@ import org.apache.hadoop.mapreduce.Count
import org.apache.avro.Schema;
import org.apache.avro.io.Decoder;
import org.apache.avro.io.DecoderFactory;
-import org.apache.avro.io.JsonDecoder;
import org.apache.avro.io.DatumReader;
import org.apache.avro.specific.SpecificDatumReader;
@@ -146,8 +145,10 @@ public class EventReader implements Clos
result = new TaskAttemptUnsuccessfulCompletionEvent(); break;
case CLEANUP_ATTEMPT_KILLED:
result = new TaskAttemptUnsuccessfulCompletionEvent(); break;
+ case AM_STARTED:
+ result = new AMStartedEvent(); break;
default:
- throw new RuntimeException("unexpected event type!");
+ throw new RuntimeException("unexpected event type: " + wrapper.type);
}
result.setDatum(wrapper.event);
return result;
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Wed Oct 19 05:28:22 2011
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.jobh
import java.io.IOException;
import java.util.HashMap;
+import java.util.LinkedList;
+import java.util.List;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
@@ -37,6 +39,8 @@ import org.apache.hadoop.mapreduce.TaskI
import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
/**
* Default Parser for the JobHistory files. Typical usage is
@@ -174,6 +178,9 @@ public class JobHistoryParser {
case CLEANUP_ATTEMPT_FINISHED:
handleTaskAttemptFinishedEvent((TaskAttemptFinishedEvent) event);
break;
+ case AM_STARTED:
+ handleAMStartedEvent((AMStartedEvent) event);
+ break;
default:
break;
}
@@ -241,6 +248,7 @@ public class JobHistoryParser {
attemptInfo.trackerName = event.getTrackerName();
attemptInfo.taskType = event.getTaskType();
attemptInfo.shufflePort = event.getShufflePort();
+ attemptInfo.containerId = event.getContainerId();
taskInfo.attemptsMap.put(attemptId, attemptInfo);
}
@@ -305,6 +313,20 @@ public class JobHistoryParser {
info.totalReduces = event.getTotalReduces();
info.uberized = event.getUberized();
}
+
+ private void handleAMStartedEvent(AMStartedEvent event) {
+ AMInfo amInfo = new AMInfo();
+ amInfo.appAttemptId = event.getAppAttemptId();
+ amInfo.startTime = event.getStartTime();
+ amInfo.containerId = event.getContainerId();
+ amInfo.nodeManagerHost = event.getNodeManagerHost();
+ amInfo.nodeManagerHttpPort = event.getNodeManagerHttpPort();
+ if (info.amInfos == null) {
+ info.amInfos = new LinkedList<AMInfo>();
+ }
+ info.amInfos.add(amInfo);
+ info.latestAmInfo = amInfo;
+ }
private void handleJobInfoChangeEvent(JobInfoChangeEvent event) {
info.submitTime = event.getSubmitTime();
@@ -348,6 +370,8 @@ public class JobHistoryParser {
Map<JobACL, AccessControlList> jobACLs;
Map<TaskID, TaskInfo> tasksMap;
+ List<AMInfo> amInfos;
+ AMInfo latestAmInfo;
boolean uberized;
/** Create a job info object where job information will be stored
@@ -377,7 +401,9 @@ public class JobHistoryParser {
System.out.println("REDUCE_COUNTERS:" + reduceCounters.toString());
System.out.println("TOTAL_COUNTERS: " + totalCounters.toString());
System.out.println("UBERIZED: " + uberized);
-
+ for (AMInfo amInfo : amInfos) {
+ amInfo.printAll();
+ }
for (TaskInfo ti: tasksMap.values()) {
ti.printAll();
}
@@ -427,6 +453,10 @@ public class JobHistoryParser {
public Map<JobACL, AccessControlList> getJobACLs() { return jobACLs; }
/** @return the uberized status of this job */
public boolean getUberized() { return uberized; }
+ /** @return the list of AMInfos, one per AppMaster attempt of the job */
+ public List<AMInfo> getAMInfos() { return amInfos; }
+ /** @return the AMInfo for the newest AppMaster */
+ public AMInfo getLatestAMInfo() { return latestAmInfo; }
}
/**
@@ -509,6 +539,7 @@ public class JobHistoryParser {
int httpPort;
int shufflePort;
String hostname;
+ ContainerId containerId;
/** Create a Task Attempt Info which will store attempt level information
* on a history parse.
@@ -534,6 +565,7 @@ public class JobHistoryParser {
System.out.println("TRACKER_NAME:" + trackerName);
System.out.println("HTTP_PORT:" + httpPort);
System.out.println("SHUFFLE_PORT:" + shufflePort);
+ System.out.println("CONTAINER_ID:" + containerId);
if (counters != null) {
System.out.println("COUNTERS:" + counters.toString());
}
@@ -569,5 +601,74 @@ public class JobHistoryParser {
public int getHttpPort() { return httpPort; }
/** @return the Shuffle port for the tracker */
public int getShufflePort() { return shufflePort; }
+ /** @return the ContainerId in which this task attempt ran */
+ public ContainerId getContainerId() { return containerId; }
+ }
+
+ /**
+ * Stores AM information
+ */
+ public static class AMInfo {
+ ApplicationAttemptId appAttemptId;
+ long startTime;
+ ContainerId containerId;
+ String nodeManagerHost;
+ int nodeManagerHttpPort;
+
+ /**
+ * Create an AMInfo which will store AM level information on a history
+ * parse.
+ */
+ public AMInfo() {
+ startTime = -1;
+ nodeManagerHost = "";
+ nodeManagerHttpPort = -1;
+ }
+
+ public AMInfo(ApplicationAttemptId appAttemptId, long startTime,
+ ContainerId containerId, String nodeManagerHost, int nodeManagerHttpPort) {
+ this.appAttemptId = appAttemptId;
+ this.startTime = startTime;
+ this.containerId = containerId;
+ this.nodeManagerHost = nodeManagerHost;
+ this.nodeManagerHttpPort = nodeManagerHttpPort;
+ }
+
+ /**
+ * Print all the information about this AM.
+ */
+ public void printAll() {
+ System.out.println("APPLICATION_ATTEMPT_ID:" + appAttemptId.toString());
+ System.out.println("START_TIME: " + startTime);
+ System.out.println("CONTAINER_ID: " + containerId.toString());
+ System.out.println("NODE_MANAGER_HOST: " + nodeManagerHost);
+ System.out.println("NODE_MANAGER_HTTP_PORT: " + nodeManagerHttpPort);
+ }
+
+ /** @return the ApplicationAttemptId */
+ public ApplicationAttemptId getAppAttemptId() {
+ return appAttemptId;
+ }
+
+ /** @return the start time of the AM */
+ public long getStartTime() {
+ return startTime;
+ }
+
+ /** @return the container id for the AM */
+ public ContainerId getContainerId() {
+ return containerId;
+ }
+
+ /** @return the host name for the node manager on which the AM is running */
+ public String getNodeManagerHost() {
+ return nodeManagerHost;
+ }
+
+ /** @return the http port for the node manager running the AM */
+ public int getNodeManagerHttpPort() {
+ return nodeManagerHttpPort;
+ }
}
+
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobInitedEvent.java Wed Oct 19 05:28:22 2011
@@ -18,8 +18,6 @@
package org.apache.hadoop.mapreduce.jobhistory;
-import java.io.IOException;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.JobID;
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/TaskAttemptStartedEvent.java Wed Oct 19 05:28:22 2011
@@ -18,13 +18,13 @@
package org.apache.hadoop.mapreduce.jobhistory;
-import java.io.IOException;
-
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskID;
import org.apache.hadoop.mapreduce.TaskType;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.avro.util.Utf8;
@@ -45,10 +45,11 @@ public class TaskAttemptStartedEvent imp
* @param trackerName Name of the Task Tracker where attempt is running
* @param httpPort The port number of the tracker
* @param shufflePort The shuffle port number of the container
+ * @param containerId The containerId for the task attempt.
*/
public TaskAttemptStartedEvent( TaskAttemptID attemptId,
TaskType taskType, long startTime, String trackerName,
- int httpPort, int shufflePort) {
+ int httpPort, int shufflePort, ContainerId containerId) {
datum.attemptId = new Utf8(attemptId.toString());
datum.taskid = new Utf8(attemptId.getTaskID().toString());
datum.startTime = startTime;
@@ -56,6 +57,15 @@ public class TaskAttemptStartedEvent imp
datum.trackerName = new Utf8(trackerName);
datum.httpPort = httpPort;
datum.shufflePort = shufflePort;
+ datum.containerId = new Utf8(containerId.toString());
+ }
+
+ // TODO Remove after MrV1 is removed.
+ // Using a dummy containerId to prevent jobHistory parse failures.
+ public TaskAttemptStartedEvent(TaskAttemptID attemptId, TaskType taskType,
+ long startTime, String trackerName, int httpPort, int shufflePort) {
+ this(attemptId, taskType, startTime, trackerName, httpPort, shufflePort,
+ ConverterUtils.toContainerId("container_-1_-1_-1_-1"));
}
TaskAttemptStartedEvent() {}
@@ -91,5 +101,8 @@ public class TaskAttemptStartedEvent imp
? EventType.MAP_ATTEMPT_STARTED
: EventType.REDUCE_ATTEMPT_STARTED;
}
-
+ /** Get the ContainerId */
+ public ContainerId getContainerId() {
+ return ConverterUtils.toContainerId(datum.containerId.toString());
+ }
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedJob.java Wed Oct 19 05:28:22 2011
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapreduce.JobAC
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -337,4 +338,9 @@ public class CompletedJob implements org
public Path getConfFile() {
return confFile;
}
+
+ @Override
+ public List<AMInfo> getAMInfos() {
+ return jobInfo.getAMInfos();
+ }
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/CompletedTaskAttempt.java Wed Oct 19 05:28:22 2011
@@ -29,8 +29,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -85,35 +83,17 @@ public class CompletedTaskAttempt implem
@Override
public ContainerId getAssignedContainerID() {
- //TODO ContainerId needs to be part of some historyEvent to be able to
- //render the log directory.
- ContainerId containerId =
- RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
- ContainerId.class);
- containerId.setId(-1);
- ApplicationAttemptId applicationAttemptId =
- RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
- ApplicationAttemptId.class);
- applicationAttemptId.setAttemptId(-1);
- ApplicationId applicationId =
- RecordFactoryProvider.getRecordFactory(null).newRecordInstance(
- ApplicationId.class);
- applicationId.setClusterTimestamp(-1);
- applicationId.setId(-1);
- applicationAttemptId.setApplicationId(applicationId);
- containerId.setApplicationAttemptId(applicationAttemptId);
- return containerId;
+ return attemptInfo.getContainerId();
}
@Override
public String getAssignedContainerMgrAddress() {
- // TODO Verify this is correct.
- return attemptInfo.getTrackerName();
+ return attemptInfo.getHostname();
}
@Override
public String getNodeHttpAddress() {
- return attemptInfo.getHostname() + ":" + attemptInfo.getHttpPort();
+ return attemptInfo.getTrackerName() + ":" + attemptInfo.getHttpPort();
}
@Override
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/PartialJob.java Wed Oct 19 05:28:22 2011
@@ -23,6 +23,7 @@ import java.util.Map;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.v2.api.records.Counters;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
@@ -159,4 +160,9 @@ public class PartialJob implements org.a
throw new IllegalStateException("Not implemented yet");
}
+ @Override
+ public List<AMInfo> getAMInfos() {
+ return null;
+ }
+
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryEvents.java Wed Oct 19 05:28:22 2011
@@ -41,8 +41,10 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.event.EventHandler;
import org.apache.hadoop.yarn.service.Service;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test;
public class TestJobHistoryEvents {
@@ -159,6 +161,10 @@ public class TestJobHistoryEvents {
private void verifyAttempt(TaskAttempt attempt) {
Assert.assertEquals("TaskAttempt state not currect",
TaskAttemptState.SUCCEEDED, attempt.getState());
+ Assert.assertNotNull(attempt.getAssignedContainerID());
+ //Verify the wrong ctor is not being used. Remove after mrv1 is removed.
+ ContainerId fakeCid = BuilderUtils.newContainerId(-1, -1, -1, -1);
+ Assert.assertFalse(attempt.getAssignedContainerID().equals(fakeCid));
}
static class MRAppWithHistory extends MRApp {
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/java/org/apache/hadoop/mapreduce/v2/hs/TestJobHistoryParsing.java Wed Oct 19 05:28:22 2011
@@ -34,8 +34,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.JobInfo;
-import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.AMInfo;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.jobhistory.JobHistoryParser.TaskInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.app.MRApp;
@@ -46,7 +47,9 @@ import org.apache.hadoop.mapreduce.v2.hs
import org.apache.hadoop.mapreduce.v2.jobhistory.FileNameIndexUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils;
import org.apache.hadoop.mapreduce.v2.jobhistory.JobIndexInfo;
+import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.service.Service;
+import org.apache.hadoop.yarn.util.BuilderUtils;
import org.junit.Test;
public class TestJobHistoryParsing {
@@ -54,6 +57,7 @@ public class TestJobHistoryParsing {
@Test
public void testHistoryParsing() throws Exception {
Configuration conf = new Configuration();
+ long amStartTimeEst = System.currentTimeMillis();
MRApp app = new MRAppWithHistory(2, 1, true, this.getClass().getName(), true);
app.submit(conf);
Job job = app.getContext().getAllJobs().values().iterator().next();
@@ -102,12 +106,30 @@ public class TestJobHistoryParsing {
job.isUber(), jobInfo.getUberized());
int totalTasks = jobInfo.getAllTasks().size();
Assert.assertEquals("total number of tasks is incorrect ", 3, totalTasks);
+
+ // Verify aminfo
+ Assert.assertEquals(1, jobInfo.getAMInfos().size());
+ Assert.assertEquals("testhost", jobInfo.getAMInfos().get(0)
+ .getNodeManagerHost());
+ AMInfo amInfo = jobInfo.getAMInfos().get(0);
+ Assert.assertEquals(3333, amInfo.getNodeManagerHttpPort());
+ Assert.assertEquals(1, amInfo.getAppAttemptId().getAttemptId());
+ Assert.assertEquals(amInfo.getAppAttemptId(), amInfo.getContainerId()
+ .getApplicationAttemptId());
+ Assert.assertTrue(amInfo.getStartTime() <= System.currentTimeMillis()
+ && amInfo.getStartTime() >= amStartTimeEst);
+ ContainerId fakeCid = BuilderUtils.newContainerId(-1, -1, -1, -1);
//Assert at taskAttempt level
for (TaskInfo taskInfo : jobInfo.getAllTasks().values()) {
int taskAttemptCount = taskInfo.getAllTaskAttempts().size();
Assert.assertEquals("total number of task attempts ",
1, taskAttemptCount);
+ TaskAttemptInfo taInfo =
+ taskInfo.getAllTaskAttempts().values().iterator().next();
+ Assert.assertNotNull(taInfo.getContainerId());
+ //Verify the wrong ctor is not being used. Remove after mrv1 is removed.
+ Assert.assertFalse(taInfo.getContainerId().equals(fakeCid));
}
// Deep compare Job and JobInfo
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java Wed Oct 19 05:28:22 2011
@@ -36,12 +36,24 @@ public interface ApplicationConstants {
// TODO: They say tokens via env isn't good.
public static final String APPLICATION_CLIENT_SECRET_ENV_NAME =
"AppClientTokenEnv";
-
+
+ /**
+ * The environment variable for CONTAINER_ID. Set in AppMaster environment
+ * only
+ */
+ public static final String AM_CONTAINER_ID_ENV = "AM_CONTAINER_ID";
+
+ /**
+ * The environment variable for NM_HTTP_ADDRESS. Set in AppMaster environment
+ * only
+ */
+ public static final String NM_HTTP_ADDRESS_ENV = "NM_HTTP_ADDRESS";
+
/**
- * The environmental variable for APPLICATION_ATTEMPT_ID. Set in
- * ApplicationMaster's environment only.
+ * The environment variable for APP_SUBMIT_TIME. Set in AppMaster environment
+ * only
*/
- public static final String APPLICATION_ATTEMPT_ID_ENV = "APPLICATION_ATTEMPT_ID";
+ public static final String APP_SUBMIT_TIME_ENV = "APP_SUBMIT_TIME_ENV";
public static final String CONTAINER_TOKEN_FILE_ENV_NAME =
UserGroupInformation.HADOOP_TOKEN_FILE_LOCATION;
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java?rev=1185977&r1=1185976&r2=1185977&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/records/ApplicationSubmissionContext.java Wed Oct 19 05:28:22 2011
@@ -126,7 +126,7 @@ public interface ApplicationSubmissionCo
@Public
@Stable
public void setUser(String user);
-
+
/**
* Get the <code>ContainerLaunchContext</code> to describe the
* <code>Container</code> with which the <code>ApplicationMaster</code> is