You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by at...@apache.org on 2011/09/14 00:49:38 UTC
svn commit: r1170378 [2/12] - in
/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/
dev-support/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop-mapreduce-clie...
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskAttemptImpl.java Tue Sep 13 22:49:27 2011
@@ -94,7 +94,6 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerRequestEvent;
import org.apache.hadoop.mapreduce.v2.app.speculate.SpeculatorEvent;
import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHConfig;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
@@ -138,6 +137,7 @@ public abstract class TaskAttemptImpl im
protected final Configuration conf;
protected final Path jobFile;
protected final int partition;
+ @SuppressWarnings("rawtypes")
protected final EventHandler eventHandler;
private final TaskAttemptId attemptId;
private final Clock clock;
@@ -204,6 +204,11 @@ public abstract class TaskAttemptImpl im
.addTransition(TaskAttemptState.ASSIGNED, TaskAttemptState.FAILED,
TaskAttemptEventType.TA_CONTAINER_LAUNCH_FAILED,
new DeallocateContainerTransition(TaskAttemptState.FAILED, false))
+ .addTransition(TaskAttemptState.ASSIGNED,
+ TaskAttemptState.FAIL_CONTAINER_CLEANUP,
+ TaskAttemptEventType.TA_CONTAINER_COMPLETED,
+ CLEANUP_CONTAINER_TRANSITION)
+ // ^ If RM kills the container due to expiry, preemption etc.
.addTransition(TaskAttemptState.ASSIGNED,
TaskAttemptState.KILL_CONTAINER_CLEANUP,
TaskAttemptEventType.TA_KILL, CLEANUP_CONTAINER_TRANSITION)
@@ -432,7 +437,8 @@ public abstract class TaskAttemptImpl im
//this is the last status reported by the REMOTE running attempt
private TaskAttemptStatus reportedStatus;
- public TaskAttemptImpl(TaskId taskId, int i, EventHandler eventHandler,
+ public TaskAttemptImpl(TaskId taskId, int i,
+ @SuppressWarnings("rawtypes") EventHandler eventHandler,
TaskAttemptListener taskAttemptListener, Path jobFile, int partition,
Configuration conf, String[] dataLocalHosts, OutputCommitter committer,
Token<JobTokenIdentifier> jobToken,
@@ -528,6 +534,13 @@ public abstract class TaskAttemptImpl im
ContainerLaunchContext container =
recordFactory.newRecordInstance(ContainerLaunchContext.class);
+ // Application resources
+ Map<String, LocalResource> localResources =
+ new HashMap<String, LocalResource>();
+
+ // Application environment
+ Map<String, String> environment = new HashMap<String, String>();
+
try {
FileSystem remoteFS = FileSystem.get(conf);
@@ -536,7 +549,7 @@ public abstract class TaskAttemptImpl im
Path remoteJobJar = (new Path(remoteTask.getConf().get(
MRJobConfig.JAR))).makeQualified(remoteFS.getUri(),
remoteFS.getWorkingDirectory());
- container.setLocalResource(
+ localResources.put(
MRConstants.JOB_JAR,
createLocalResource(remoteFS, recordFactory, remoteJobJar,
LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
@@ -558,7 +571,7 @@ public abstract class TaskAttemptImpl im
new Path(path, oldJobId.toString());
Path remoteJobConfPath =
new Path(remoteJobSubmitDir, MRConstants.JOB_CONF_FILE);
- container.setLocalResource(
+ localResources.put(
MRConstants.JOB_CONF_FILE,
createLocalResource(remoteFS, recordFactory, remoteJobConfPath,
LocalResourceType.FILE, LocalResourceVisibility.APPLICATION));
@@ -566,9 +579,14 @@ public abstract class TaskAttemptImpl im
+ remoteJobConfPath.toUri().toASCIIString());
// //////////// End of JobConf setup
+
// Setup DistributedCache
- setupDistributedCache(remoteFS, conf, container);
+ setupDistributedCache(remoteFS, conf, localResources, environment);
+ // Set local-resources and environment
+ container.setLocalResources(localResources);
+ container.setEnv(environment);
+
// Setup up tokens
Credentials taskCredentials = new Credentials();
@@ -595,12 +613,12 @@ public abstract class TaskAttemptImpl im
// Add shuffle token
LOG.info("Putting shuffle token in serviceData");
- container
- .setServiceData(
- ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
- ShuffleHandler.serializeServiceData(jobToken));
+ Map<String, ByteBuffer> serviceData = new HashMap<String, ByteBuffer>();
+ serviceData.put(ShuffleHandler.MAPREDUCE_SHUFFLE_SERVICEID,
+ ShuffleHandler.serializeServiceData(jobToken));
+ container.setServiceData(serviceData);
- MRApps.addToClassPath(container.getAllEnv(), getInitialClasspath());
+ MRApps.addToClassPath(container.getEnv(), getInitialClasspath());
} catch (IOException e) {
throw new YarnException(e);
}
@@ -623,11 +641,11 @@ public abstract class TaskAttemptImpl im
classPaths.add(workDir.toString()); // TODO
// Construct the actual Container
- container.addAllCommands(MapReduceChildJVM.getVMCommand(
+ container.setCommands(MapReduceChildJVM.getVMCommand(
taskAttemptListener.getAddress(), remoteTask, javaHome,
workDir.toString(), containerLogDir, childTmpDir, jvmID));
- MapReduceChildJVM.setVMEnv(container.getAllEnv(), classPaths,
+ MapReduceChildJVM.setVMEnv(container.getEnv(), classPaths,
workDir.toString(), containerLogDir, nmLdLibraryPath, remoteTask,
localizedApplicationTokensFile);
@@ -649,11 +667,15 @@ public abstract class TaskAttemptImpl im
return result;
}
- private void setupDistributedCache(FileSystem remoteFS, Configuration conf,
- ContainerLaunchContext container) throws IOException {
+ private void setupDistributedCache(FileSystem remoteFS,
+ Configuration conf,
+ Map<String, LocalResource> localResources,
+ Map<String, String> env)
+ throws IOException {
// Cache archives
- parseDistributedCacheArtifacts(remoteFS, container, LocalResourceType.ARCHIVE,
+ parseDistributedCacheArtifacts(remoteFS, localResources, env,
+ LocalResourceType.ARCHIVE,
DistributedCache.getCacheArchives(conf),
parseTimeStamps(DistributedCache.getArchiveTimestamps(conf)),
getFileSizes(conf, MRJobConfig.CACHE_ARCHIVES_SIZES),
@@ -661,7 +683,9 @@ public abstract class TaskAttemptImpl im
DistributedCache.getArchiveClassPaths(conf));
// Cache files
- parseDistributedCacheArtifacts(remoteFS, container, LocalResourceType.FILE,
+ parseDistributedCacheArtifacts(remoteFS,
+ localResources, env,
+ LocalResourceType.FILE,
DistributedCache.getCacheFiles(conf),
parseTimeStamps(DistributedCache.getFileTimestamps(conf)),
getFileSizes(conf, MRJobConfig.CACHE_FILES_SIZES),
@@ -673,7 +697,10 @@ public abstract class TaskAttemptImpl im
// Use TaskDistributedCacheManager.CacheFiles.makeCacheFiles(URI[],
// long[], boolean[], Path[], FileType)
private void parseDistributedCacheArtifacts(
- FileSystem remoteFS, ContainerLaunchContext container, LocalResourceType type,
+ FileSystem remoteFS,
+ Map<String, LocalResource> localResources,
+ Map<String, String> env,
+ LocalResourceType type,
URI[] uris, long[] timestamps, long[] sizes, boolean visibilities[],
Path[] pathsToPutOnClasspath) throws IOException {
@@ -710,9 +737,9 @@ public abstract class TaskAttemptImpl im
throw new IllegalArgumentException("Resource name must be relative");
}
String linkName = name.toUri().getPath();
- container.setLocalResource(
+ localResources.put(
linkName,
- BuilderUtils.newLocalResource(recordFactory,
+ BuilderUtils.newLocalResource(
p.toUri(), type,
visibilities[i]
? LocalResourceVisibility.PUBLIC
@@ -720,8 +747,7 @@ public abstract class TaskAttemptImpl im
sizes[i], timestamps[i])
);
if (classPaths.containsKey(u.getPath())) {
- Map<String, String> environment = container.getAllEnv();
- MRApps.addToClassPath(environment, linkName);
+ MRApps.addToClassPath(env, linkName);
}
}
}
@@ -893,6 +919,7 @@ public abstract class TaskAttemptImpl im
}
}
+ @SuppressWarnings("unchecked")
@Override
public void handle(TaskAttemptEvent event) {
LOG.info("Processing " + event.getTaskAttemptID() +
@@ -903,7 +930,8 @@ public abstract class TaskAttemptImpl im
try {
stateMachine.doTransition(event.getType(), event);
} catch (InvalidStateTransitonException e) {
- LOG.error("Can't handle this event at current state", e);
+ LOG.error("Can't handle this event at current state for "
+ + this.attemptId, e);
eventHandler.handle(new JobDiagnosticsUpdateEvent(
this.attemptId.getTaskId().getJobId(), "Invalid event " + event.getType() +
" on TaskAttempt " + this.attemptId));
@@ -981,8 +1009,8 @@ public abstract class TaskAttemptImpl im
try {
if (progressSplitBlock == null) {
progressSplitBlock = new WrappedProgressSplitsBlock(conf.getInt(
- JHConfig.JOBHISTORY_TASKPROGRESS_NUMBER_SPLITS_KEY,
- WrappedProgressSplitsBlock.DEFAULT_NUMBER_PROGRESS_SPLITS));
+ MRJobConfig.MR_AM_NUM_PROGRESS_SPLITS,
+ MRJobConfig.DEFAULT_MR_AM_NUM_PROGRESS_SPLITS));
}
return progressSplitBlock;
} finally {
@@ -1035,6 +1063,7 @@ public abstract class TaskAttemptImpl im
public RequestContainerTransition(boolean rescheduled) {
this.rescheduled = rescheduled;
}
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1063,6 +1092,7 @@ public abstract class TaskAttemptImpl im
private static class ContainerAssignedTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(final TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1112,6 +1142,7 @@ public abstract class TaskAttemptImpl im
this.finalState = finalState;
this.withdrawsContainerRequest = withdrawsContainerRequest;
}
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1158,6 +1189,7 @@ public abstract class TaskAttemptImpl im
private static class LaunchedContainerTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent evnt) {
@@ -1208,6 +1240,7 @@ public abstract class TaskAttemptImpl im
private static class CommitPendingTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1219,6 +1252,7 @@ public abstract class TaskAttemptImpl im
private static class TaskCleanupTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1234,6 +1268,7 @@ public abstract class TaskAttemptImpl im
private static class SucceededTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1263,6 +1298,7 @@ public abstract class TaskAttemptImpl im
private static class FailedTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt, TaskAttemptEvent event) {
// set the finish time
@@ -1287,6 +1323,7 @@ public abstract class TaskAttemptImpl im
}
}
+ @SuppressWarnings({ "unchecked" })
private void logAttemptFinishedEvent(TaskAttemptState state) {
//Log finished events only if an attempt started.
if (getLaunchTime() == 0) return;
@@ -1320,6 +1357,7 @@ public abstract class TaskAttemptImpl im
private static class TooManyFetchFailureTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt, TaskAttemptEvent event) {
//add to diagnostic
@@ -1347,6 +1385,7 @@ public abstract class TaskAttemptImpl im
private static class KilledTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1373,6 +1412,7 @@ public abstract class TaskAttemptImpl im
private static class CleanupContainerTransition implements
SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
@@ -1399,6 +1439,7 @@ public abstract class TaskAttemptImpl im
private static class StatusUpdater
implements SingleArcTransition<TaskAttemptImpl, TaskAttemptEvent> {
+ @SuppressWarnings("unchecked")
@Override
public void transition(TaskAttemptImpl taskAttempt,
TaskAttemptEvent event) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TaskImpl.java Tue Sep 13 22:49:27 2011
@@ -528,7 +528,8 @@ public abstract class TaskImpl implement
try {
stateMachine.doTransition(event.getType(), event);
} catch (InvalidStateTransitonException e) {
- LOG.error("Can't handle this event at current state", e);
+ LOG.error("Can't handle this event at current state for "
+ + this.taskId, e);
internalError(event.getType());
}
if (oldState != getState()) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/launcher/ContainerLauncherImpl.java Tue Sep 13 22:49:27 2011
@@ -21,9 +21,9 @@ package org.apache.hadoop.mapreduce.v2.a
import java.io.IOException;
import java.nio.ByteBuffer;
import java.security.PrivilegedAction;
-import java.util.HashMap;
-import java.util.Map;
import java.util.concurrent.BlockingQueue;
+import java.util.concurrent.ConcurrentHashMap;
+import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.LinkedBlockingQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;
@@ -33,8 +33,8 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.ShuffleHandler;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptContainerLaunchedEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptDiagnosticsUpdateEvent;
@@ -79,8 +79,8 @@ public class ContainerLauncherImpl exten
private RecordFactory recordFactory;
//have a cache/map of UGIs so as to avoid creating too many RPC
//client connection objects to the same NodeManager
- private Map<String, UserGroupInformation> ugiMap =
- new HashMap<String, UserGroupInformation>();
+ private ConcurrentMap<String, UserGroupInformation> ugiMap =
+ new ConcurrentHashMap<String, UserGroupInformation>();
public ContainerLauncherImpl(AppContext context) {
super(ContainerLauncherImpl.class.getName());
@@ -102,7 +102,7 @@ public class ContainerLauncherImpl exten
public void start() {
launcherPool =
new ThreadPoolExecutor(getConfig().getInt(
- AMConstants.CONTAINERLAUNCHER_THREADPOOL_SIZE, 10),
+ MRJobConfig.MR_AM_CONTAINERLAUNCHER_THREAD_COUNT, 10),
Integer.MAX_VALUE, 1, TimeUnit.HOURS,
new LinkedBlockingQueue<Runnable>());
launcherPool.prestartAllCoreThreads(); // Wait for work.
@@ -142,22 +142,19 @@ public class ContainerLauncherImpl exten
UserGroupInformation user = UserGroupInformation.getCurrentUser();
- // TODO: Synchronization problems!!
if (UserGroupInformation.isSecurityEnabled()) {
- if(!ugiMap.containsKey(containerManagerBindAddr)) {
- Token<ContainerTokenIdentifier> token =
- new Token<ContainerTokenIdentifier>(
- containerToken.getIdentifier().array(),
- containerToken.getPassword().array(), new Text(
- containerToken.getKind()), new Text(
- containerToken.getService()));
- //the user in createRemoteUser in this context is not important
- user = UserGroupInformation.createRemoteUser(containerManagerBindAddr);
- user.addToken(token);
- ugiMap.put(containerManagerBindAddr, user);
- } else {
- user = ugiMap.get(containerManagerBindAddr);
- }
+
+ Token<ContainerTokenIdentifier> token = new Token<ContainerTokenIdentifier>(
+ containerToken.getIdentifier().array(), containerToken
+ .getPassword().array(), new Text(containerToken.getKind()),
+ new Text(containerToken.getService()));
+ // the user in createRemoteUser in this context is not important
+ UserGroupInformation ugi = UserGroupInformation
+ .createRemoteUser(containerManagerBindAddr);
+ ugi.addToken(token);
+ ugiMap.putIfAbsent(containerManagerBindAddr, ugi);
+
+ user = ugiMap.get(containerManagerBindAddr);
}
ContainerManager proxy =
user.doAs(new PrivilegedAction<ContainerManager>() {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/local/LocalContainerAllocator.java Tue Sep 13 22:49:27 2011
@@ -70,7 +70,7 @@ public class LocalContainerAllocator ext
if (event.getType() == ContainerAllocator.EventType.CONTAINER_REQ) {
LOG.info("Processing the event " + event.toString());
ContainerId cID = recordFactory.newRecordInstance(ContainerId.class);
- cID.setAppId(appID);
+ cID.setApplicationAttemptId(applicationAttemptId);
// use negative ids to denote that these are local. Need a better way ??
cID.setId((-1) * containerCount.getAndIncrement());
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMCommunicator.java Tue Sep 13 22:49:27 2011
@@ -25,12 +25,11 @@ import java.util.ArrayList;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
@@ -98,8 +97,8 @@ public class RMCommunicator extends Abst
public void init(Configuration conf) {
super.init(conf);
rmPollInterval =
- conf.getInt(AMConstants.AM_RM_SCHEDULE_INTERVAL,
- AMConstants.DEFAULT_AM_RM_SCHEDULE_INTERVAL);
+ conf.getInt(MRJobConfig.MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS,
+ MRJobConfig.DEFAULT_MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS);
}
@Override
@@ -226,8 +225,8 @@ public class RMCommunicator extends Abst
final YarnRPC rpc = YarnRPC.create(getConfig());
final Configuration conf = new Configuration(getConfig());
final String serviceAddr = conf.get(
- YarnConfiguration.SCHEDULER_ADDRESS,
- YarnConfiguration.DEFAULT_SCHEDULER_BIND_ADDRESS);
+ YarnConfiguration.RM_SCHEDULER_ADDRESS,
+ YarnConfiguration.DEFAULT_RM_SCHEDULER_ADDRESS);
UserGroupInformation currentUser;
try {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerAllocator.java Tue Sep 13 22:49:27 2011
@@ -40,7 +40,6 @@ import org.apache.hadoop.mapreduce.JobCo
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobCounterUpdateEvent;
@@ -54,6 +53,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.yarn.api.records.AMResponse;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.util.RackResolver;
@@ -137,11 +137,11 @@ public class RMContainerAllocator extend
MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART,
DEFAULT_COMPLETED_MAPS_PERCENT_FOR_REDUCE_SLOWSTART);
maxReduceRampupLimit = conf.getFloat(
- AMConstants.REDUCE_RAMPUP_UP_LIMIT,
- AMConstants.DEFAULT_REDUCE_RAMP_UP_LIMIT);
+ MRJobConfig.MR_AM_JOB_REDUCE_RAMPUP_UP_LIMIT,
+ MRJobConfig.DEFAULT_MR_AM_JOB_REDUCE_RAMP_UP_LIMIT);
maxReducePreemptionLimit = conf.getFloat(
- AMConstants.REDUCE_PREEMPTION_LIMIT,
- AMConstants.DEFAULT_REDUCE_PREEMPTION_LIMIT);
+ MRJobConfig.MR_AM_JOB_REDUCE_PREEMPTION_LIMIT,
+ MRJobConfig.DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT);
RackResolver.init(conf);
}
@@ -415,8 +415,8 @@ public class RMContainerAllocator extend
int headRoom = getAvailableResources() != null ? getAvailableResources().getMemory() : 0;//first time it would be null
AMResponse response = makeRemoteRequest();
int newHeadRoom = getAvailableResources() != null ? getAvailableResources().getMemory() : 0;
- List<Container> newContainers = response.getNewContainerList();
- List<Container> finishedContainers = response.getFinishedContainerList();
+ List<Container> newContainers = response.getAllocatedContainers();
+ List<ContainerStatus> finishedContainers = response.getCompletedContainersStatuses();
if (newContainers.size() + finishedContainers.size() > 0 || headRoom != newHeadRoom) {
//something changed
recalculateReduceSchedule = true;
@@ -427,12 +427,12 @@ public class RMContainerAllocator extend
allocatedContainers.add(cont);
LOG.debug("Received new Container :" + cont);
}
- for (Container cont : finishedContainers) {
+ for (ContainerStatus cont : finishedContainers) {
LOG.info("Received completed container " + cont);
- TaskAttemptId attemptID = assignedRequests.get(cont.getId());
+ TaskAttemptId attemptID = assignedRequests.get(cont.getContainerId());
if (attemptID == null) {
LOG.error("Container complete event for unknown container id "
- + cont.getId());
+ + cont.getContainerId());
} else {
assignedRequests.remove(attemptID);
if (attemptID.getTaskId().getTaskType().equals(TaskType.MAP)) {
@@ -444,7 +444,7 @@ public class RMContainerAllocator extend
eventHandler.handle(new TaskAttemptEvent(attemptID,
TaskAttemptEventType.TA_CONTAINER_COMPLETED));
// Send the diagnostics
- String diagnostics = cont.getContainerStatus().getDiagnostics();
+ String diagnostics = cont.getDiagnostics();
eventHandler.handle(new TaskAttemptDiagnosticsUpdateEvent(attemptID,
diagnostics));
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/rm/RMContainerRequestor.java Tue Sep 13 22:49:27 2011
@@ -31,13 +31,11 @@ import org.apache.commons.logging.LogFac
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.client.ClientService;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateRequest;
import org.apache.hadoop.yarn.api.protocolrecords.AllocateResponse;
import org.apache.hadoop.yarn.api.records.AMResponse;
-import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.Resource;
@@ -102,7 +100,7 @@ public abstract class RMContainerRequest
public void init(Configuration conf) {
super.init(conf);
nodeBlacklistingEnabled =
- conf.getBoolean(AMConstants.NODE_BLACKLISTING_ENABLE, true);
+ conf.getBoolean(MRJobConfig.MR_AM_JOB_NODE_BLACKLISTING_ENABLE, true);
LOG.info("nodeBlacklistingEnabled:" + nodeBlacklistingEnabled);
maxTaskFailuresPerNode =
conf.getInt(MRJobConfig.MAX_TASK_FAILURES_PER_TRACKER, 3);
@@ -124,10 +122,11 @@ public abstract class RMContainerRequest
availableResources = response.getAvailableResources();
LOG.info("getResources() for " + applicationId + ":" + " ask="
- + ask.size() + " release= " + release.size() + " newContainers="
- + response.getNewContainerCount() + " finishedContainers="
- + response.getFinishedContainerCount()
- + " resourcelimit=" + availableResources);
+ + ask.size() + " release= " + release.size() +
+ " newContainers=" + response.getAllocatedContainers().size() +
+ " finishedContainers=" +
+ response.getCompletedContainersStatuses().size() +
+ " resourcelimit=" + availableResources);
ask.clear();
release.clear();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/DefaultSpeculator.java Tue Sep 13 22:49:27 2011
@@ -34,13 +34,13 @@ import java.util.concurrent.atomic.Atomi
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.MRConstants;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
@@ -117,7 +117,7 @@ public class DefaultSpeculator extends A
try {
// "yarn.mapreduce.job.task.runtime.estimator.class"
Class<? extends TaskRuntimeEstimator> estimatorClass
- = conf.getClass(AMConstants.TASK_RUNTIME_ESTIMATOR_CLASS,
+ = conf.getClass(MRJobConfig.MR_AM_TASK_ESTIMATOR,
LegacyTaskRuntimeEstimator.class,
TaskRuntimeEstimator.class);
@@ -128,16 +128,16 @@ public class DefaultSpeculator extends A
estimator.contextualize(conf, context);
} catch (InstantiationException ex) {
- LOG.error("Can't make a speculation runtime extimator" + ex);
+ LOG.error("Can't make a speculation runtime extimator", ex);
throw new YarnException(ex);
} catch (IllegalAccessException ex) {
- LOG.error("Can't make a speculation runtime extimator" + ex);
+ LOG.error("Can't make a speculation runtime extimator", ex);
throw new YarnException(ex);
} catch (InvocationTargetException ex) {
- LOG.error("Can't make a speculation runtime extimator" + ex);
+ LOG.error("Can't make a speculation runtime extimator", ex);
throw new YarnException(ex);
} catch (NoSuchMethodException ex) {
- LOG.error("Can't make a speculation runtime extimator" + ex);
+ LOG.error("Can't make a speculation runtime extimator", ex);
throw new YarnException(ex);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/ExponentiallySmoothedTaskRuntimeEstimator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/ExponentiallySmoothedTaskRuntimeEstimator.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/ExponentiallySmoothedTaskRuntimeEstimator.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/speculate/ExponentiallySmoothedTaskRuntimeEstimator.java Tue Sep 13 22:49:27 2011
@@ -23,8 +23,8 @@ import java.util.concurrent.ConcurrentMa
import java.util.concurrent.atomic.AtomicReference;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.app.AMConstants;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptStatusUpdateEvent.TaskAttemptStatus;
@@ -129,18 +129,15 @@ public class ExponentiallySmoothedTaskRu
return vectorRef.get();
}
- private static final long DEFAULT_EXPONENTIAL_SMOOTHING_LAMBDA_MILLISECONDS
- = 1000L * 60;
-
@Override
public void contextualize(Configuration conf, AppContext context) {
super.contextualize(conf, context);
lambda
- = conf.getLong(AMConstants.EXPONENTIAL_SMOOTHING_LAMBDA_MILLISECONDS,
- DEFAULT_EXPONENTIAL_SMOOTHING_LAMBDA_MILLISECONDS);
+ = conf.getLong(MRJobConfig.MR_AM_TASK_ESTIMATOR_SMOOTH_LAMBDA_MS,
+ MRJobConfig.DEFAULT_MR_AM_TASK_ESTIMATOR_SMNOOTH_LAMBDA_MS);
smoothedValue
- = conf.getBoolean(AMConstants.EXPONENTIAL_SMOOTHING_SMOOTH_RATE, true)
+ = conf.getBoolean(MRJobConfig.MR_AM_TASK_EXTIMATOR_EXPONENTIAL_RATE_ENABLE, true)
? SmoothedValue.RATE : SmoothedValue.TIME_PER_UNIT_PROGRESS;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/App.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/App.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/App.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/App.java Tue Sep 13 22:49:27 2011
@@ -28,11 +28,27 @@ import org.apache.hadoop.mapreduce.v2.ap
@RequestScoped
public class App {
final AppContext context;
- Job job;
- Task task;
+ private Job job;
+ private Task task;
@Inject
App(AppContext ctx) {
context = ctx;
}
+
+ void setJob(Job job) {
+ this.job = job;
+ }
+
+ public Job getJob() {
+ return job;
+ }
+
+ void setTask(Task task) {
+ this.task = task;
+ }
+
+ public Task getTask() {
+ return task;
+ }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java Tue Sep 13 22:49:27 2011
@@ -22,6 +22,8 @@ import static org.apache.hadoop.yarn.uti
import java.util.Locale;
+import javax.servlet.http.HttpServletResponse;
+
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
@@ -31,9 +33,13 @@ import org.apache.hadoop.yarn.conf.YarnC
import org.apache.hadoop.yarn.util.Apps;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.Controller;
+import org.apache.hadoop.yarn.webapp.View;
import com.google.inject.Inject;
+/**
+ * This class renders the various pages that the web app supports.
+ */
public class AppController extends Controller implements AMParams {
final App app;
@@ -41,7 +47,7 @@ public class AppController extends Contr
String title) {
super(ctx);
this.app = app;
- set(APP_ID, Apps.toString(app.context.getApplicationID()));
+ set(APP_ID, app.context.getApplicationID().toString());
set(RM_WEB, YarnConfiguration.getRMWebAppURL(conf));
}
@@ -50,10 +56,16 @@ public class AppController extends Contr
this(app, conf, ctx, "am");
}
+ /**
+ * Render the default(index.html) page for the Application Controller
+ */
@Override public void index() {
setTitle(join("MapReduce Application ", $(APP_ID)));
}
+ /**
+ * Render the /info page with an overview of current application.
+ */
public void info() {
info("Application Master Overview").
_("Application ID:", $(APP_ID)).
@@ -65,22 +77,52 @@ public class AppController extends Contr
render(InfoPage.class);
}
+ /**
+ * @return The class that will render the /job page
+ */
+ protected Class<? extends View> jobPage() {
+ return JobPage.class;
+ }
+
+ /**
+ * Render the /job page
+ */
public void job() {
requireJob();
- render(JobPage.class);
+ render(jobPage());
}
+ /**
+ * @return the class that will render the /jobcounters page
+ */
+ protected Class<? extends View> countersPage() {
+ return CountersPage.class;
+ }
+
+ /**
+ * Render the /jobcounters page
+ */
public void jobCounters() {
requireJob();
- if (app.job != null) {
+ if (app.getJob() != null) {
setTitle(join("Counters for ", $(JOB_ID)));
}
- render(CountersPage.class);
+ render(countersPage());
}
+ /**
+ * @return the class that will render the /tasks page
+ */
+ protected Class<? extends View> tasksPage() {
+ return TasksPage.class;
+ }
+
+ /**
+ * Render the /tasks page
+ */
public void tasks() {
requireJob();
- if (app.job != null) {
+ if (app.getJob() != null) {
try {
String tt = $(TASK_TYPE);
tt = tt.isEmpty() ? "All" : StringUtils.capitalize(MRApps.taskType(tt).
@@ -90,20 +132,40 @@ public class AppController extends Contr
badRequest(e.getMessage());
}
}
- render(TasksPage.class);
+ render(tasksPage());
+ }
+
+ /**
+ * @return the class that will render the /task page
+ */
+ protected Class<? extends View> taskPage() {
+ return TaskPage.class;
}
+ /**
+ * Render the /task page
+ */
public void task() {
requireTask();
- if (app.task != null) {
+ if (app.getTask() != null) {
setTitle(join("Attempts for ", $(TASK_ID)));
}
- render(TaskPage.class);
+ render(taskPage());
}
+ /**
+ * @return the class that will render the /attempts page
+ */
+ protected Class<? extends View> attemptsPage() {
+ return AttemptsPage.class;
+ }
+
+ /**
+ * Render the attempts page
+ */
public void attempts() {
requireJob();
- if (app.job != null) {
+ if (app.getJob() != null) {
try {
String taskType = $(TASK_TYPE);
if (taskType.isEmpty()) {
@@ -119,27 +181,38 @@ public class AppController extends Contr
badRequest(e.getMessage());
}
}
- render(AttemptsPage.class);
+ render(attemptsPage());
}
+ /**
+ * Render a BAD_REQUEST error.
+ * @param s the error message to include.
+ */
void badRequest(String s) {
- setStatus(response().SC_BAD_REQUEST);
+ setStatus(HttpServletResponse.SC_BAD_REQUEST);
setTitle(join("Bad request: ", s));
}
+ /**
+ * Render a NOT_FOUND error.
+ * @param s the error message to include.
+ */
void notFound(String s) {
- setStatus(response().SC_NOT_FOUND);
+ setStatus(HttpServletResponse.SC_NOT_FOUND);
setTitle(join("Not found: ", s));
}
+ /**
+ * Ensure that a JOB_ID was passed into the page.
+ */
void requireJob() {
try {
if ($(JOB_ID).isEmpty()) {
throw new RuntimeException("missing job ID");
}
JobId jobID = MRApps.toJobID($(JOB_ID));
- app.job = app.context.getJob(jobID);
- if (app.job == null) {
+ app.setJob(app.context.getJob(jobID));
+ if (app.getJob() == null) {
notFound($(JOB_ID));
}
} catch (Exception e) {
@@ -147,18 +220,21 @@ public class AppController extends Contr
}
}
+ /**
+ * Ensure that a TASK_ID was passed into the page.
+ */
void requireTask() {
try {
if ($(TASK_ID).isEmpty()) {
throw new RuntimeException("missing task ID");
}
TaskId taskID = MRApps.toTaskID($(TASK_ID));
- app.job = app.context.getJob(taskID.getJobId());
- if (app.job == null) {
+ app.setJob(app.context.getJob(taskID.getJobId()));
+ if (app.getJob() == null) {
notFound(MRApps.toString(taskID.getJobId()));
} else {
- app.task = app.job.getTask(taskID);
- if (app.task == null) {
+ app.setTask(app.getJob().getTask(taskID));
+ if (app.getTask() == null) {
notFound($(TASK_ID));
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AttemptsPage.java Tue Sep 13 22:49:27 2011
@@ -57,7 +57,7 @@ public class AttemptsPage extends TaskPa
String attemptStateStr = $(ATTEMPT_STATE);
TaskAttemptStateUI neededState = MRApps
.taskAttemptState(attemptStateStr);
- for (Task task : super.app.job.getTasks(taskType).values()) {
+ for (Task task : super.app.getJob().getTasks(taskType).values()) {
Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
for (TaskAttempt attempt : attempts.values()) {
if (neededState.correspondsTo(attempt.getState())) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/NavBlock.java Tue Sep 13 22:49:27 2011
@@ -45,8 +45,8 @@ public class NavBlock extends HtmlBlock
ul().
li().a(url("app/info"), "About")._().
li().a(url("app"), "Jobs")._()._();
- if (app.job != null) {
- String jobid = MRApps.toString(app.job.getID());
+ if (app.getJob() != null) {
+ String jobid = MRApps.toString(app.getJob().getID());
nav.
h3("Job").
ul().
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java Tue Sep 13 22:49:27 2011
@@ -98,11 +98,11 @@ public class TaskPage extends AppView {
}
protected boolean isValidRequest() {
- return app.task != null;
+ return app.getTask() != null;
}
protected Collection<TaskAttempt> getTaskAttempts() {
- return app.task.getAttempts().values();
+ return app.getTask().getAttempts().values();
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java Tue Sep 13 22:49:27 2011
@@ -42,7 +42,7 @@ public class TasksBlock extends HtmlBloc
}
@Override protected void render(Block html) {
- if (app.job == null) {
+ if (app.getJob() == null) {
html.
h2($(TITLE));
return;
@@ -63,7 +63,7 @@ public class TasksBlock extends HtmlBloc
th("Finish Time").
th("Elapsed Time")._()._().
tbody();
- for (Task task : app.job.getTasks().values()) {
+ for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRApp.java Tue Sep 13 22:49:27 2011
@@ -35,7 +35,6 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
-import org.apache.hadoop.mapreduce.v2.MRConstants;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
@@ -62,10 +61,8 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleaner;
import org.apache.hadoop.mapreduce.v2.app.taskclean.TaskCleanupEvent;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHConfig;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.security.Credentials;
-import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.Clock;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@@ -134,8 +131,8 @@ public class MRApp extends MRAppMaster {
public Job submit(Configuration conf) throws Exception {
String user = conf.get(MRJobConfig.USER_NAME, "mapred");
conf.set(MRJobConfig.USER_NAME, user);
- conf.set(MRConstants.APPS_STAGING_DIR_KEY, testAbsPath.toString());
- conf.setBoolean(JHConfig.CREATE_HISTORY_INTERMEDIATE_BASE_DIR_KEY, true);
+ conf.set(MRJobConfig.MR_AM_STAGING_DIR, testAbsPath.toString());
+ conf.setBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true);
//TODO: fix the bug where the speculator gets events with
//not-fully-constructed objects. For now, disable speculative exec
LOG.info("****DISABLING SPECULATIVE EXECUTION*****");
@@ -326,7 +323,7 @@ public class MRApp extends MRAppMaster {
@Override
public void handle(ContainerAllocatorEvent event) {
ContainerId cId = recordFactory.newRecordInstance(ContainerId.class);
- cId.setAppId(getContext().getApplicationID());
+ cId.setApplicationAttemptId(getContext().getApplicationAttemptId());
cId.setId(containerCount++);
Container container = recordFactory.newRecordInstance(Container.class);
container.setId(cId);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MRAppBenchmark.java Tue Sep 13 22:49:27 2011
@@ -30,6 +30,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocator;
import org.apache.hadoop.mapreduce.v2.app.rm.ContainerAllocatorEvent;
import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.NodeId;
@@ -124,12 +125,15 @@ public class MRAppBenchmark {
try {
if (concurrentRunningTasks < maxConcurrentRunningTasks) {
event = eventQueue.take();
- ContainerId cId = recordFactory.newRecordInstance(ContainerId.class);
- cId.setAppId(getContext().getApplicationID());
+ ContainerId cId =
+ recordFactory.newRecordInstance(ContainerId.class);
+ cId.setApplicationAttemptId(
+ getContext().getApplicationAttemptId());
cId.setId(containerCount++);
//System.out.println("Allocating " + containerCount);
- Container container = recordFactory.newRecordInstance(Container.class);
+ Container container =
+ recordFactory.newRecordInstance(Container.class);
container.setId(cId);
NodeId nodeId = recordFactory.newRecordInstance(NodeId.class);
nodeId.setHost("dummy");
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/MockJobs.java Tue Sep 13 22:49:27 2011
@@ -51,6 +51,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.MockApps;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.util.Records;
@@ -235,7 +236,11 @@ public class MockJobs extends MockApps {
@Override
public ContainerId getAssignedContainerID() {
ContainerId id = Records.newRecord(ContainerId.class);
- id.setAppId(taid.getTaskId().getJobId().getAppId());
+ ApplicationAttemptId appAttemptId =
+ Records.newRecord(ApplicationAttemptId.class);
+ appAttemptId.setApplicationId(taid.getTaskId().getJobId().getAppId());
+ appAttemptId.setAttemptId(0);
+ id.setApplicationAttemptId(appAttemptId);
return id;
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestFail.java Tue Sep 13 22:49:27 2011
@@ -18,6 +18,7 @@
package org.apache.hadoop.mapreduce.v2.app;
+import java.io.IOException;
import java.util.Iterator;
import java.util.Map;
@@ -36,6 +37,12 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEvent;
import org.apache.hadoop.mapreduce.v2.app.job.event.TaskAttemptEventType;
+import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncher;
+import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherEvent;
+import org.apache.hadoop.mapreduce.v2.app.launcher.ContainerLauncherImpl;
+import org.apache.hadoop.yarn.api.ContainerManager;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.api.records.ContainerToken;
import org.junit.Test;
/**
@@ -160,6 +167,74 @@ public class TestFail {
}
}
+ @Test
+ public void testTaskFailWithUnusedContainer() throws Exception {
+ MRApp app = new FailingTaskWithUnusedContainer();
+ Configuration conf = new Configuration();
+ int maxAttempts = 1;
+ conf.setInt(MRJobConfig.MAP_MAX_ATTEMPTS, maxAttempts);
+ // disable uberization (requires entire job to be reattempted, so max for
+ // subtask attempts is overridden to 1)
+ conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
+ Job job = app.submit(conf);
+ app.waitForState(job, JobState.RUNNING);
+ Map<TaskId, Task> tasks = job.getTasks();
+ Assert.assertEquals("Num tasks is not correct", 1, tasks.size());
+ Task task = tasks.values().iterator().next();
+ app.waitForState(task, TaskState.SCHEDULED);
+ Map<TaskAttemptId, TaskAttempt> attempts = tasks.values().iterator()
+ .next().getAttempts();
+ Assert.assertEquals("Num attempts is not correct", maxAttempts, attempts
+ .size());
+ TaskAttempt attempt = attempts.values().iterator().next();
+ app.waitForState(attempt, TaskAttemptState.ASSIGNED);
+ app.getDispatcher().getEventHandler().handle(
+ new TaskAttemptEvent(attempt.getID(),
+ TaskAttemptEventType.TA_CONTAINER_COMPLETED));
+ app.waitForState(job, JobState.FAILED);
+ }
+
+ static class FailingTaskWithUnusedContainer extends MRApp {
+
+ public FailingTaskWithUnusedContainer() {
+ super(1, 0, false, "TaskFailWithUnsedContainer", true);
+ }
+
+ protected ContainerLauncher createContainerLauncher(AppContext context,
+ boolean isLocal) {
+ return new ContainerLauncherImpl(context) {
+ @Override
+ public void handle(ContainerLauncherEvent event) {
+
+ switch (event.getType()) {
+ case CONTAINER_REMOTE_LAUNCH:
+ super.handle(event);
+ break;
+ case CONTAINER_REMOTE_CLEANUP:
+ getContext().getEventHandler().handle(
+ new TaskAttemptEvent(event.getTaskAttemptID(),
+ TaskAttemptEventType.TA_CONTAINER_CLEANED));
+ break;
+ }
+ }
+
+ @Override
+ protected ContainerManager getCMProxy(ContainerId containerID,
+ String containerManagerBindAddr, ContainerToken containerToken)
+ throws IOException {
+ try {
+ synchronized (this) {
+ wait(); // Just hang the thread simulating a very slow NM.
+ }
+ } catch (InterruptedException e) {
+ e.printStackTrace();
+ }
+ return null;
+ }
+ };
+ };
+ }
+
static class TimeOutTaskMRApp extends MRApp {
TimeOutTaskMRApp(int maps, int reduces) {
super(maps, reduces, false, "TimeOutTaskMRApp", true);
@@ -232,5 +307,6 @@ public class TestFail {
t.testTimedOutTask();
t.testMapFailureMaxPercent();
t.testReduceFailureMaxPercent();
+ t.testTaskFailWithUnusedContainer();
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRecovery.java Tue Sep 13 22:49:27 2011
@@ -131,7 +131,7 @@ public class TestRecovery {
//in rerun the 1st map will be recovered from previous run
app = new MRAppWithHistory(2, 1, false, this.getClass().getName(), false, ++runCount);
conf = new Configuration();
- conf.setBoolean(AMConstants.RECOVERY_ENABLE, true);
+ conf.setBoolean(MRJobConfig.MR_AM_JOB_RECOVERY_ENABLE, true);
conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, false);
job = app.submit(conf);
app.waitForState(job, JobState.RUNNING);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/webapp/TestAMWebApp.java Tue Sep 13 22:49:27 2011
@@ -108,7 +108,7 @@ public class TestAMWebApp {
Injector injector = WebAppTests.createMockInjector(AppContext.class, ctx);
AppController controller = injector.getInstance(AppController.class);
controller.index();
- assertEquals(Apps.toString(ctx.appID), controller.get(APP_ID,""));
+ assertEquals(ctx.appID.toString(), controller.get(APP_ID,""));
}
@Test public void testAppView() {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/MRConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/MRConstants.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/MRConstants.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/MRConstants.java Tue Sep 13 22:49:27 2011
@@ -25,9 +25,6 @@ import org.apache.hadoop.classification.
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface MRConstants {
-
- public static final String YARN_MR_PREFIX = "yarn.mapreduce.job.";
-
// This should be the directory where splits file gets localized on the node
// running ApplicationMaster.
public static final String JOB_SUBMIT_DIR = "jobSubmitDir";
@@ -45,8 +42,6 @@ public interface MRConstants {
public static final String YARN_MAPREDUCE_APP_JAR_PATH =
"$YARN_HOME/modules/" + HADOOP_MAPREDUCE_CLIENT_APP_JAR_NAME;
- public static final String APPS_STAGING_DIR_KEY = "yarn.apps.stagingDir";
-
// The token file for the application. Should contain tokens for access to
// remote file system and may optionally contain application specific tokens.
// For now, generated by the AppManagers and used by NodeManagers and the
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/jobhistory/JobHistoryUtils.java Tue Sep 13 22:49:27 2011
@@ -38,8 +38,8 @@ import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.fs.RemoteIterator;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.TypeConverter;
-import org.apache.hadoop.mapreduce.v2.MRConstants;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.net.NetUtils;
@@ -184,9 +184,9 @@ public class JobHistoryUtils {
public static String getConfiguredHistoryIntermediateDoneDirPrefix(
Configuration conf) {
String doneDirPrefix = conf
- .get(JHConfig.HISTORY_INTERMEDIATE_DONE_DIR_KEY);
+ .get(JHAdminConfig.MR_HISTORY_INTERMEDIATE_DONE_DIR);
if (doneDirPrefix == null) {
- doneDirPrefix = conf.get(MRConstants.APPS_STAGING_DIR_KEY)
+ doneDirPrefix = conf.get(MRJobConfig.MR_AM_STAGING_DIR)
+ "/history/done_intermediate";
}
return doneDirPrefix;
@@ -199,9 +199,9 @@ public class JobHistoryUtils {
*/
public static String getConfiguredHistoryServerDoneDirPrefix(
Configuration conf) {
- String doneDirPrefix = conf.get(JHConfig.HISTORY_DONE_DIR_KEY);
+ String doneDirPrefix = conf.get(JHAdminConfig.MR_HISTORY_DONE_DIR);
if (doneDirPrefix == null) {
- doneDirPrefix = conf.get(MRConstants.APPS_STAGING_DIR_KEY)
+ doneDirPrefix = conf.get(MRJobConfig.MR_AM_STAGING_DIR)
+ "/history/done";
}
return doneDirPrefix;
@@ -220,7 +220,7 @@ public class JobHistoryUtils {
public static boolean shouldCreateNonUserDirectory(Configuration conf) {
// Returning true by default to allow non secure single node clusters to work
// without any configuration change.
- return conf.getBoolean(JHConfig.CREATE_HISTORY_INTERMEDIATE_BASE_DIR_KEY, true);
+ return conf.getBoolean(MRJobConfig.MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR, true);
}
/**
@@ -478,8 +478,8 @@ public class JobHistoryUtils {
public static String getHistoryUrl(Configuration conf, ApplicationId appId)
throws UnknownHostException {
//construct the history url for job
- String hsAddress = conf.get(JHConfig.HS_WEBAPP_BIND_ADDRESS,
- JHConfig.DEFAULT_HS_WEBAPP_BIND_ADDRESS);
+ String hsAddress = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
InetSocketAddress address = NetUtils.createSocketAddr(hsAddress);
StringBuffer sb = new StringBuffer();
if (address.getAddress().isAnyLocalAddress() ||
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/security/client/ClientHSSecurityInfo.java Tue Sep 13 22:49:27 2011
@@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2.s
import java.lang.annotation.Annotation;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapreduce.v2.jobhistory.JHConfig;
+import org.apache.hadoop.mapreduce.v2.jobhistory.JHAdminConfig;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.SecurityInfo;
import org.apache.hadoop.security.token.TokenInfo;
@@ -44,7 +44,7 @@ public class ClientHSSecurityInfo extend
@Override
public String serverPrincipal() {
- return JHConfig.HS_SERVER_PRINCIPAL_KEY;
+ return JHAdminConfig.MR_HISTORY_PRINCIPAL;
}
@Override
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapreduce/v2/util/MRApps.java Tue Sep 13 22:49:27 2011
@@ -32,6 +32,7 @@ import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.MRConstants;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
@@ -157,6 +158,7 @@ public class MRApps extends Apps {
public static void setInitialClasspath(
Map<String, String> environment) throws IOException {
InputStream classpathFileStream = null;
+ BufferedReader reader = null;
try {
// Get yarn mapreduce-app classpath from generated classpath
// Works if compile time env is same as runtime. Mainly tests.
@@ -165,8 +167,7 @@ public class MRApps extends Apps {
String mrAppGeneratedClasspathFile = "mrapp-generated-classpath";
classpathFileStream =
thisClassLoader.getResourceAsStream(mrAppGeneratedClasspathFile);
- BufferedReader reader =
- new BufferedReader(new InputStreamReader(classpathFileStream));
+ reader = new BufferedReader(new InputStreamReader(classpathFileStream));
String cp = reader.readLine();
if (cp != null) {
addToClassPath(environment, cp.trim());
@@ -198,6 +199,9 @@ public class MRApps extends Apps {
if (classpathFileStream != null) {
classpathFileStream.close();
}
+ if (reader != null) {
+ reader.close();
+ }
}
// TODO: Remove duplicates.
}
@@ -218,7 +222,7 @@ public class MRApps extends Apps {
private static final String STAGING_CONSTANT = ".staging";
public static Path getStagingAreaDir(Configuration conf, String user) {
return new Path(
- conf.get(MRConstants.APPS_STAGING_DIR_KEY) +
+ conf.get(MRJobConfig.MR_AM_STAGING_DIR) +
Path.SEPARATOR + user + Path.SEPARATOR + STAGING_CONSTANT);
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java Tue Sep 13 22:49:27 2011
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.u
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.JobID;
+import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
@@ -113,7 +114,7 @@ public class TestMRApps {
@Test public void testGetJobFileWithUser() {
Configuration conf = new Configuration();
- conf.set(MRConstants.APPS_STAGING_DIR_KEY, "/my/path/to/staging");
+ conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging");
String jobFile = MRApps.getJobFile(conf, "dummy-user", new JobID("dummy-job", 12345));
assertNotNull("getJobFile results in null.", jobFile);
assertEquals("jobFile with specified user is not as expected.",
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/JobClient.java Tue Sep 13 22:49:27 2011
@@ -868,15 +868,6 @@ public class JobClient extends CLI {
Counters counters = Counters.downgrade(cntrs);
return counters.findCounter(counterGroupName, counterName).getValue();
}
-
- void displayJobList(JobStatus[] jobs) {
- System.out.printf("JobId\tState\tStartTime\tUserName\tQueue\tPriority\tSchedulingInfo\n");
- for (JobStatus job : jobs) {
- System.out.printf("%s\t%d\t%d\t%s\t%s\t%s\t%s\n", job.getJobID(), job.getRunState(),
- job.getStartTime(), job.getUsername(), job.getQueue(),
- job.getJobPriority().name(), job.getSchedulingInfo());
- }
- }
/**
* Get status information about the max available Maps in the cluster.
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Tue Sep 13 22:49:27 2011
@@ -301,4 +301,103 @@ public interface MRJobConfig {
"mapreduce.ubertask.child.ulimit"; // or mapreduce.uber.ulimit?
public static final String UBERTASK_ENV =
"mapreduce.ubertask.child.env"; // or mapreduce.uber.env?
+
+ public static final String MR_PREFIX = "yarn.app.mapreduce.";
+
+ public static final String MR_AM_PREFIX = MR_PREFIX + "am.";
+
+ /** The staging directory for map reduce.*/
+ public static final String MR_AM_STAGING_DIR =
+ MR_AM_PREFIX+"staging-dir";
+
+ /** The amount of memory the MR app master needs.*/
+ public static final String MR_AM_VMEM_MB =
+ MR_AM_PREFIX+"resource.mb";
+ public static final int DEFAULT_MR_AM_VMEM_MB = 2048;
+
+ /** Command line arguments passed to the MR app master.*/
+ public static final String MR_AM_COMMAND_OPTS =
+ MR_AM_PREFIX+"command-opts";
+ public static final String DEFAULT_MR_AM_COMMAND_OPTS = "-Xmx1536m";
+
+ /** Root Logging level passed to the MR app master.*/
+ public static final String MR_AM_LOG_OPTS =
+ MR_AM_PREFIX+"log-opts";
+ public static final String DEFAULT_MR_AM_LOG_OPTS = "INFO";
+
+ /** The number of splits when reporting progress in MR*/
+ public static final String MR_AM_NUM_PROGRESS_SPLITS =
+ MR_AM_PREFIX+"num-progress-splits";
+ public static final int DEFAULT_MR_AM_NUM_PROGRESS_SPLITS = 12;
+
+ /** Number of threads used to launch containers in the app master.*/
+ public static final String MR_AM_CONTAINERLAUNCHER_THREAD_COUNT =
+ MR_AM_PREFIX+"containerlauncher.thread-count";
+
+ /** Number of threads to handle job client RPC requests.*/
+ public static final String MR_AM_JOB_CLIENT_THREAD_COUNT =
+ MR_AM_PREFIX + "job.client.thread-count";
+ public static final int DEFAULT_MR_AM_JOB_CLIENT_THREAD_COUNT = 1;
+
+ /** Enable blacklisting of nodes in the job.*/
+ public static final String MR_AM_JOB_NODE_BLACKLISTING_ENABLE =
+ MR_AM_PREFIX + "job.node.blacklisting.enable";
+
+ /** Enable job recovery.*/
+ public static final String MR_AM_JOB_RECOVERY_ENABLE =
+ MR_AM_PREFIX + "job.recovery.enable";
+
+ /**
+ * Limit on the number of reducers that can be preempted to ensure that at
+ * least one map task can run if it needs to. Percentage between 0.0 and 1.0
+ */
+ public static final String MR_AM_JOB_REDUCE_PREEMPTION_LIMIT =
+ MR_AM_PREFIX + "job.reduce.preemption.limit";
+ public static final float DEFAULT_MR_AM_JOB_REDUCE_PREEMPTION_LIMIT = 0.5f;
+
+ /**
+ * Limit reduces starting until a certain percentage of maps have finished.
+ * Percentage between 0.0 and 1.0
+ */
+ public static final String MR_AM_JOB_REDUCE_RAMPUP_UP_LIMIT =
+ MR_AM_PREFIX + "job.reduce.rampup.limit";
+ public static final float DEFAULT_MR_AM_JOB_REDUCE_RAMP_UP_LIMIT = 0.5f;
+
+ /** The class that should be used for speculative execution calculations.*/
+ public static final String MR_AM_JOB_SPECULATOR =
+ MR_AM_PREFIX + "job.speculator.class";
+
+ /** Class used to estimate task resource needs.*/
+ public static final String MR_AM_TASK_ESTIMATOR =
+ MR_AM_PREFIX + "job.task.estimator.class";
+
+ /** The lambda value in the smoothing function of the task estimator.*/
+ public static final String MR_AM_TASK_ESTIMATOR_SMOOTH_LAMBDA_MS =
+ MR_AM_PREFIX
+ + "job.task.estimator.exponential.smooth.lambda-ms";
+
+ public static final long DEFAULT_MR_AM_TASK_ESTIMATOR_SMNOOTH_LAMBDA_MS =
+ 1000L * 60;
+
+ /** true if the smoothing rate should be exponential.*/
+ public static final String MR_AM_TASK_EXTIMATOR_EXPONENTIAL_RATE_ENABLE =
+ MR_AM_PREFIX + "job.task.estimator.exponential.smooth.rate";
+
+ /** The number of threads used to handle task RPC calls.*/
+ public static final String MR_AM_TASK_LISTENER_THREAD_COUNT =
+ MR_AM_PREFIX + "job.task.listener.thread-count";
+ public static final int DEFAULT_MR_AM_TASK_LISTENER_THREAD_COUNT = 10;
+
+ /** How often the AM should send heartbeats to the RM.*/
+ public static final String MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS =
+ MR_AM_PREFIX + "scheduler.heartbeat.interval-ms";
+ public static final int DEFAULT_MR_AM_TO_RM_HEARTBEAT_INTERVAL_MS = 2000;
+
+ /**
+ * Boolean. Create the base dirs in the JobHistoryEventHandler
+ * Set to false for multi-user clusters. This is an internal config that
+ * is set by the MR framework and read by it too.
+ */
+ public static final String MR_AM_CREATE_JH_INTERMEDIATE_BASE_DIR =
+ MR_AM_PREFIX + "create-intermediate-jh-base-dir";
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/tools/CLI.java Tue Sep 13 22:49:27 2011
@@ -523,7 +523,7 @@ public class CLI extends Configured impl
}
}
- protected void displayJobList(JobStatus[] jobs)
+ public void displayJobList(JobStatus[] jobs)
throws IOException, InterruptedException {
System.out.println("Total jobs:" + jobs.length);
System.out.println("JobId\tState\tStartTime\t" +
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/util/ResourceCalculatorPlugin.java Tue Sep 13 22:49:27 2011
@@ -97,7 +97,7 @@ public abstract class ResourceCalculator
@InterfaceStability.Unstable
public abstract ProcResourceValues getProcResourceValues();
- public class ProcResourceValues {
+ public static class ProcResourceValues {
private final long cumulativeCpuTime;
private final long physicalMemorySize;
private final long virtualMemorySize;
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml?rev=1170378&r1=1170377&r2=1170378&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/pom.xml Tue Sep 13 22:49:27 2011
@@ -43,6 +43,12 @@
<type>test-jar</type>
<scope>test</scope>
</dependency>
+ <dependency>
+ <groupId>org.apache.hadoop</groupId>
+ <artifactId>hadoop-yarn-common</artifactId>
+ <type>test-jar</type>
+ <scope>test</scope>
+ </dependency>
</dependencies>
<build>
<plugins>