You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2012/05/03 04:14:30 UTC
svn commit: r1333291 [2/4] - in
/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project: ./ conf/
dev-support/ hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/
hado...
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project:r1327719-1333290
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/CHANGES.txt?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/CHANGES.txt Thu May 3 02:14:01 2012
@@ -50,8 +50,13 @@ Trunk (unreleased changes)
MAPREDUCE-3935. Annotate Counters.Counter and Counters.Group as @Public.
(tomwhite)
+ HADOOP-8285 MR changes for Use ProtoBuf for RpcPayLoadHeader (sanjay radia)
+
BUG FIXES
+ MAPREDUCE-4100. [Gridmix] Bug fixed in compression emulation feature for
+ map only jobs. (amarrk)
+
MAPREDUCE-4149. [Rumen] Rumen fails to parse certain counter
strings. (ravigummadi)
@@ -146,6 +151,18 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-4103. Fix HA docs for changes to shell command fencer args (todd)
+ MAPREDUCE-4093. Improve RM WebApp start up when proxy address is not set
+ (Devaraj K via bobby)
+
+ MAPREDUCE-4138. Reduce memory usage of counters due to non-static nested
+ classes. (tomwhite)
+
+ MAPREDUCE-3883. Document yarn.nodemanager.delete.debug-delay-sec
+ configuration property (Eugene Koontz via tgraves)
+
+ MAPREDUCE-4219. make default container-executor.conf.dir be a path
+ relative to the container-executor binary. (rvs via tucu)
+
OPTIMIZATIONS
BUG FIXES
@@ -247,6 +264,20 @@ Release 2.0.0 - UNRELEASED
MAPREDUCE-4008. ResourceManager throws MetricsException on start up
saying QueueMetrics MBean already exists (Devaraj K via tgraves)
+ MAPREDUCE-3867. MiniMRYarn/MiniYarn uses fixed ports (tucu)
+
+ MAPREDUCE-4141. clover integration broken, also mapreduce poms are
+ pulling in clover as a dependency. (phunt via tucu)
+
+ MAPREDUCE-4193. broken doc link for yarn-default.xml in site.xml.
+ (phunt via tomwhite)
+
+ MAPREDUCE-4202. TestYarnClientProtocolProvider is broken (Daryn Sharp via
+ bobby)
+
+ MAPREDUCE-3173. MRV2 UI doesn't work properly without internet (Devaraj K
+ via bobby)
+
Release 0.23.3 - UNRELEASED
INCOMPATIBLE CHANGES
@@ -254,6 +285,9 @@ Release 0.23.3 - UNRELEASED
MAPREDUCE-4072. User set java.library.path seems to overwrite default
creating problems native lib loading (Anupam Seth via bobby)
+ MAPREDUCE-3812. Lower default allocation sizes, fix allocation
+ configurations and document them (Harsh J via bobby)
+
NEW FEATURES
IMPROVEMENTS
@@ -276,6 +310,11 @@ Release 0.23.3 - UNRELEASED
MAPREDUCE-4161. create sockets consistently (Daryn Sharp via bobby)
+ MAPREDUCE-4079. Allow MR AppMaster to limit ephemeral port range.
+ (bobby via tgraves)
+
+ MAPREDUCE-4210. Expose listener address for WebApp (Daryn Sharp via bobby)
+
OPTIMIZATIONS
BUG FIXES
@@ -364,6 +403,50 @@ Release 0.23.3 - UNRELEASED
MAPREDUCE-4160. some mrv1 ant tests fail with timeout - due to 4156
(tgraves)
+ MAPREDUCE-4074. Client continuously retries to RM When RM goes down
+ before launching Application Master (xieguiming via tgraves)
+
+ MAPREDUCE-4159. Job is running in Uber mode after setting
+ "mapreduce.job.ubertask.maxreduces" to zero (Devaraj K via bobby)
+
+ MAPREDUCE-4165. Committing is misspelled as commiting in task logs
+ (John Eagles via bobby)
+
+ MAPREDUCE-4129. Lots of unneeded counters log messages (Ahmed Radwan via
+ bobby)
+
+ MAPREDUCE-3947. yarn.app.mapreduce.am.resource.mb not documented
+ (Devaraj K via bobby)
+
+ MAPREDUCE-4190. Improve web UI for task attempts userlog link (Tom Graves
+ via bobby)
+
+ MAPREDUCE-4133. MR over viewfs is broken (John George via bobby)
+
+ MAPREDUCE-4194. ConcurrentModificationError in DirectoryCollection
+ (Jonathan Eagles via bobby)
+
+ MAPREDUCE-3613. web service calls header contains 2 content types
+ (tgraves)
+
+ MAPREDUCE-4169. Container Logs appear in unsorted order (Jonathan Eagles
+ via bobby)
+
+ MAPREDUCE-4189. TestContainerManagerSecurity is failing (Devaraj K via
+ bobby)
+
+ MAPREDUCE-4209. junit dependency in hadoop-mapreduce-client is missing
+ scope test (Radim Kolar via bobby)
+
+ MAPREDUCE-4206. Sorting by Last Health-Update on the RM nodes page
+ does not work correctly (Jonathan Eagles via tgraves)
+
+ MAPREDUCE-4212. TestJobClientGetJob sometimes fails
+ (Daryn Sharp via tgraves)
+
+ MAPREDUCE-4211. Error conditions (missing appid, appid not found) are
+ masked in the RM app page (Jonathan Eagles via bobby)
+
Release 0.23.2 - UNRELEASED
INCOMPATIBLE CHANGES
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/CHANGES.txt
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt:r1327719-1333290
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/conf/
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/conf:r1327719-1333290
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java Thu May 3 02:14:01 2012
@@ -139,7 +139,8 @@ public class MRClientService extends Abs
rpc.getServer(MRClientProtocol.class, protocolHandler, address,
conf, secretManager,
conf.getInt(MRJobConfig.MR_AM_JOB_CLIENT_THREAD_COUNT,
- MRJobConfig.DEFAULT_MR_AM_JOB_CLIENT_THREAD_COUNT));
+ MRJobConfig.DEFAULT_MR_AM_JOB_CLIENT_THREAD_COUNT),
+ MRJobConfig.MR_AM_JOB_CLIENT_PORT_RANGE);
// Enable service authorization?
if (conf.getBoolean(
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/job/impl/JobImpl.java Thu May 3 02:14:01 2012
@@ -822,12 +822,12 @@ public class JobImpl implements org.apac
//FIXME: handling multiple reduces within a single AM does not seem to
//work.
- // int sysMaxReduces =
- // job.conf.getInt(MRJobConfig.JOB_UBERTASK_MAXREDUCES, 1);
- int sysMaxReduces = 1;
+ int sysMaxReduces = conf.getInt(MRJobConfig.JOB_UBERTASK_MAXREDUCES, 1);
+ boolean isValidUberMaxReduces = (sysMaxReduces == 0)
+ || (sysMaxReduces == 1);
long sysMaxBytes = conf.getLong(MRJobConfig.JOB_UBERTASK_MAXBYTES,
- fs.getDefaultBlockSize()); // FIXME: this is wrong; get FS from
+ fs.getDefaultBlockSize(this.remoteJobSubmitDir)); // FIXME: this is wrong; get FS from
// [File?]InputFormat and default block size
// from that
@@ -856,7 +856,7 @@ public class JobImpl implements org.apac
// while "uber-AM" (MR AM + LocalContainerLauncher) loops over tasks
// and thus requires sequential execution.
isUber = uberEnabled && smallNumMapTasks && smallNumReduceTasks
- && smallInput && smallMemory && notChainJob;
+ && smallInput && smallMemory && notChainJob && isValidUberMaxReduces;
if (isUber) {
LOG.info("Uberizing job " + jobId + ": " + numMapTasks + "m+"
@@ -889,7 +889,9 @@ public class JobImpl implements org.apac
if (!smallMemory)
msg.append(" too much RAM;");
if (!notChainJob)
- msg.append(" chainjob");
+ msg.append(" chainjob;");
+ if (!isValidUberMaxReduces)
+ msg.append(" not supported uber max reduces");
LOG.info(msg.toString());
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java Thu May 3 02:14:01 2012
@@ -21,6 +21,7 @@ package org.apache.hadoop.mapreduce.v2.a
import java.io.IOException;
import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -67,6 +68,8 @@ import com.google.inject.Inject;
public class AMWebServices {
private final AppContext appCtx;
private final App app;
+
+ private @Context HttpServletResponse response;
@Inject
public AMWebServices(final App app, final AppContext context) {
@@ -86,6 +89,11 @@ public class AMWebServices {
return true;
}
+ private void init() {
+ //clear content type
+ response.setContentType(null);
+ }
+
/**
* convert a job id string to an actual job and handle all the error checking.
*/
@@ -205,6 +213,7 @@ public class AMWebServices {
@Path("/info")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AppInfo getAppInfo() {
+ init();
return new AppInfo(this.app, this.app.context);
}
@@ -212,6 +221,7 @@ public class AMWebServices {
@Path("/jobs")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobsInfo getJobs(@Context HttpServletRequest hsr) {
+ init();
JobsInfo allJobs = new JobsInfo();
for (Job job : appCtx.getAllJobs().values()) {
// getAllJobs only gives you a partial we want a full
@@ -229,6 +239,7 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobInfo getJob(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
return new JobInfo(job, hasAccess(job, hsr));
}
@@ -237,7 +248,7 @@ public class AMWebServices {
@Path("/jobs/{jobid}/jobattempts")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
-
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
AMAttemptsInfo amAttempts = new AMAttemptsInfo();
for (AMInfo amInfo : job.getAMInfos()) {
@@ -253,6 +264,7 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobCounterInfo getJobCounters(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
return new JobCounterInfo(this.appCtx, job);
@@ -264,6 +276,7 @@ public class AMWebServices {
public ConfInfo getJobConf(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
ConfInfo info;
@@ -282,6 +295,7 @@ public class AMWebServices {
public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @QueryParam("type") String type) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
TasksInfo allTasks = new TasksInfo();
@@ -308,6 +322,7 @@ public class AMWebServices {
public TaskInfo getJobTask(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
@@ -321,6 +336,7 @@ public class AMWebServices {
@Context HttpServletRequest hsr, @PathParam("jobid") String jid,
@PathParam("taskid") String tid) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
@@ -332,8 +348,9 @@ public class AMWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
- TaskAttemptsInfo attempts = new TaskAttemptsInfo();
+ init();
+ TaskAttemptsInfo attempts = new TaskAttemptsInfo();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
@@ -357,6 +374,7 @@ public class AMWebServices {
@PathParam("jobid") String jid, @PathParam("taskid") String tid,
@PathParam("attemptid") String attId) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
@@ -375,6 +393,7 @@ public class AMWebServices {
@Context HttpServletRequest hsr, @PathParam("jobid") String jid,
@PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
+ init();
Job job = getJobFromJobIdString(jid, appCtx);
checkAccess(job, hsr);
Task task = getTaskFromTaskIdString(tid, job);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java Thu May 3 02:14:01 2012
@@ -66,7 +66,8 @@ public class TaskPage extends AppView {
th(".id", "Attempt").
th(".progress", "Progress").
th(".state", "State").
- th(".node", "node").
+ th(".node", "Node").
+ th(".logs", "Logs").
th(".tsh", "Started").
th(".tsh", "Finished").
th(".tsh", "Elapsed").
@@ -83,24 +84,28 @@ public class TaskPage extends AppView {
long finishTime = ta.getFinishTime();
long elapsed = ta.getElapsedTime();
String diag = ta.getNote() == null ? "" : ta.getNote();
- TD<TR<TBODY<TABLE<Hamlet>>>> nodeTd = tbody.
- tr().
- td(".id", taid).
- td(".progress", progress).
- td(".state", ta.getState()).td();
+ TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
+ TD<TR<TBODY<TABLE<Hamlet>>>> nodeTd = row.
+ td(".id", taid).
+ td(".progress", progress).
+ td(".state", ta.getState()).td();
if (nodeHttpAddr == null) {
nodeTd._("N/A");
} else {
nodeTd.
a(".nodelink", url("http://", nodeHttpAddr), nodeHttpAddr);
}
+ nodeTd._();
if (containerId != null) {
String containerIdStr = ta.getAssignedContainerIdStr();
- nodeTd._(" ").
+ row.td().
a(".logslink", url("http://", nodeHttpAddr, "node", "containerlogs",
- containerIdStr, app.getJob().getUserName()), "logs");
+ containerIdStr, app.getJob().getUserName()), "logs")._();
+ } else {
+ row.td()._("N/A")._();
}
- nodeTd._().
+
+ row.
td(".ts", Times.format(startTime)).
td(".ts", Times.format(finishTime)).
td(".dt", StringUtils.formatTime(elapsed)).
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/TestRMContainerAllocator.java Thu May 3 02:14:01 2012
@@ -65,6 +65,7 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.event.DrainDispatcher;
import org.apache.hadoop.yarn.event.Event;
@@ -341,7 +342,7 @@ public class TestRMContainerAllocator {
}
@Override
protected ResourceScheduler createScheduler() {
- return new MyFifoScheduler(getRMContext());
+ return new MyFifoScheduler();
}
}
@@ -997,18 +998,6 @@ public class TestRMContainerAllocator {
}
private static class MyFifoScheduler extends FifoScheduler {
-
- public MyFifoScheduler(RMContext rmContext) {
- super();
- try {
- reinitialize(new Configuration(), new ContainerTokenSecretManager(),
- rmContext);
- } catch (IOException ie) {
- LOG.info("add application failed with ", ie);
- assert (false);
- }
- }
-
// override this to copy the objects otherwise FifoScheduler updates the
// numContainers in same objects as kept by RMContainerAllocator
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/test/java/org/apache/hadoop/mapreduce/v2/app/job/impl/TestJobImpl.java Thu May 3 02:14:01 2012
@@ -37,14 +37,20 @@ import org.apache.hadoop.mapreduce.MRJob
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.jobhistory.JobHistoryEvent;
+import org.apache.hadoop.mapreduce.security.token.JobTokenSecretManager;
+import org.apache.hadoop.mapreduce.split.JobSplit.TaskSplitMetaInfo;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.event.JobEvent;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl.InitTransition;
import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl.JobNoTasksCompletedTransition;
+import org.apache.hadoop.mapreduce.v2.app.metrics.MRAppMetrics;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.event.EventHandler;
+import org.apache.hadoop.yarn.util.Records;
import org.junit.Assert;
import org.junit.Test;
@@ -233,4 +239,69 @@ public class TestJobImpl {
Assert.assertTrue(job5.checkAccess(ugi1, null));
Assert.assertTrue(job5.checkAccess(ugi2, null));
}
+ @Test
+ public void testUberDecision() throws Exception {
+
+ // with default values, no of maps is 2
+ Configuration conf = new Configuration();
+ boolean isUber = testUberDecision(conf);
+ Assert.assertFalse(isUber);
+
+ // enable uber mode, no of maps is 2
+ conf = new Configuration();
+ conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
+ isUber = testUberDecision(conf);
+ Assert.assertTrue(isUber);
+
+ // enable uber mode, no of maps is 2, no of reduces is 1 and uber task max
+ // reduces is 0
+ conf = new Configuration();
+ conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
+ conf.setInt(MRJobConfig.JOB_UBERTASK_MAXREDUCES, 0);
+ conf.setInt(MRJobConfig.NUM_REDUCES, 1);
+ isUber = testUberDecision(conf);
+ Assert.assertFalse(isUber);
+
+ // enable uber mode, no of maps is 2, no of reduces is 1 and uber task max
+ // reduces is 1
+ conf = new Configuration();
+ conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
+ conf.setInt(MRJobConfig.JOB_UBERTASK_MAXREDUCES, 1);
+ conf.setInt(MRJobConfig.NUM_REDUCES, 1);
+ isUber = testUberDecision(conf);
+ Assert.assertTrue(isUber);
+
+ // enable uber mode, no of maps is 2 and uber task max maps is 0
+ conf = new Configuration();
+ conf.setBoolean(MRJobConfig.JOB_UBERTASK_ENABLE, true);
+ conf.setInt(MRJobConfig.JOB_UBERTASK_MAXMAPS, 1);
+ isUber = testUberDecision(conf);
+ Assert.assertFalse(isUber);
+ }
+
+ private boolean testUberDecision(Configuration conf) {
+ JobID jobID = JobID.forName("job_1234567890000_0001");
+ JobId jobId = TypeConverter.toYarn(jobID);
+ MRAppMetrics mrAppMetrics = MRAppMetrics.create();
+ JobImpl job = new JobImpl(jobId, Records
+ .newRecord(ApplicationAttemptId.class), conf, mock(EventHandler.class),
+ null, mock(JobTokenSecretManager.class), null, null, null,
+ mrAppMetrics, mock(OutputCommitter.class), true, null, 0, null, null);
+ InitTransition initTransition = getInitTransition();
+ JobEvent mockJobEvent = mock(JobEvent.class);
+ initTransition.transition(job, mockJobEvent);
+ boolean isUber = job.isUber();
+ return isUber;
+ }
+
+ private InitTransition getInitTransition() {
+ InitTransition initTransition = new InitTransition() {
+ @Override
+ protected TaskSplitMetaInfo[] createSplits(JobImpl job, JobId jobId) {
+ return new TaskSplitMetaInfo[] { new TaskSplitMetaInfo(),
+ new TaskSplitMetaInfo() };
+ }
+ };
+ return initTransition;
+ }
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalClientProtocolProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalClientProtocolProvider.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalClientProtocolProvider.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/main/java/org/apache/hadoop/mapred/LocalClientProtocolProvider.java Thu May 3 02:14:01 2012
@@ -33,8 +33,9 @@ public class LocalClientProtocolProvider
@Override
public ClientProtocol create(Configuration conf) throws IOException {
- String framework = conf.get(MRConfig.FRAMEWORK_NAME);
- if (framework != null && !framework.equals(MRConfig.LOCAL_FRAMEWORK_NAME)) {
+ String framework =
+ conf.get(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
+ if (!MRConfig.LOCAL_FRAMEWORK_NAME.equals(framework)) {
return null;
}
String tracker = conf.get(JTConfig.JT_IPC_ADDRESS, "local");
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java Thu May 3 02:14:01 2012
@@ -49,8 +49,9 @@ public class TestJobClientGetJob {
JobConf conf = new JobConf();
conf.set("mapreduce.framework.name", "local");
FileInputFormat.addInputPath(conf, createTempFile("in", "hello"));
- FileOutputFormat.setOutputPath(conf,
- new Path(TEST_ROOT_DIR, getClass().getSimpleName()));
+ Path outputDir = new Path(TEST_ROOT_DIR, getClass().getSimpleName());
+ outputDir.getFileSystem(conf).delete(outputDir, true);
+ FileOutputFormat.setOutputPath(conf, outputDir);
JobClient jc = new JobClient(conf);
RunningJob runningJob = jc.submitJob(conf);
assertNotNull("Running job", runningJob);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Counters.java Thu May 3 02:14:01 2012
@@ -387,21 +387,13 @@ public class Counters
private static class FrameworkGroupImpl<T extends Enum<T>>
extends FrameworkCounterGroup<T, Counter> {
- // Mix the framework counter implementation into the Counter interface
- class FrameworkCounterImpl extends FrameworkCounter {
- FrameworkCounterImpl(T key) {
- super(key);
- }
-
- }
-
FrameworkGroupImpl(Class<T> cls) {
super(cls);
}
@Override
protected Counter newCounter(T key) {
- return new Counter(new FrameworkCounterImpl(key));
+ return new Counter(new FrameworkCounter<T>(key, getName()));
}
@Override
@@ -413,17 +405,9 @@ public class Counters
// Mix the file system counter group implementation into the Group interface
private static class FSGroupImpl extends FileSystemCounterGroup<Counter> {
- private class FSCounterImpl extends FSCounter {
-
- FSCounterImpl(String scheme, FileSystemCounter key) {
- super(scheme, key);
- }
-
- }
-
@Override
protected Counter newCounter(String scheme, FileSystemCounter key) {
- return new Counter(new FSCounterImpl(scheme, key));
+ return new Counter(new FSCounter(scheme, key));
}
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/Task.java Thu May 3 02:14:01 2012
@@ -964,7 +964,7 @@ abstract public class Task implements Wr
TaskReporter reporter
) throws IOException, InterruptedException {
LOG.info("Task:" + taskId + " is done."
- + " And is in the process of commiting");
+ + " And is in the process of committing");
updateCounters();
boolean commitRequired = isCommitRequired();
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Counters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Counters.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Counters.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/Counters.java Thu May 3 02:14:01 2012
@@ -49,8 +49,8 @@ public class Counters extends AbstractCo
}
@Override
- protected FrameworkCounter newCounter(T key) {
- return new FrameworkCounter(key);
+ protected FrameworkCounter<T> newCounter(T key) {
+ return new FrameworkCounter<T>(key, getName());
}
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/MRJobConfig.java Thu May 3 02:14:01 2012
@@ -325,6 +325,13 @@ public interface MRJobConfig {
MR_PREFIX + "client-am.ipc.max-retries";
public static final int DEFAULT_MR_CLIENT_TO_AM_IPC_MAX_RETRIES = 3;
+ /**
+ * The number of client retries to the RM/HS/AM before throwing exception.
+ */
+ public static final String MR_CLIENT_MAX_RETRIES =
+ MR_PREFIX + "client.max-retries";
+ public static final int DEFAULT_MR_CLIENT_MAX_RETRIES = 3;
+
/** The staging directory for map reduce.*/
public static final String MR_AM_STAGING_DIR =
MR_AM_PREFIX+"staging-dir";
@@ -334,12 +341,12 @@ public interface MRJobConfig {
/** The amount of memory the MR app master needs.*/
public static final String MR_AM_VMEM_MB =
MR_AM_PREFIX+"resource.mb";
- public static final int DEFAULT_MR_AM_VMEM_MB = 2048;
+ public static final int DEFAULT_MR_AM_VMEM_MB = 1536;
/** Command line arguments passed to the MR app master.*/
public static final String MR_AM_COMMAND_OPTS =
MR_AM_PREFIX+"command-opts";
- public static final String DEFAULT_MR_AM_COMMAND_OPTS = "-Xmx1536m";
+ public static final String DEFAULT_MR_AM_COMMAND_OPTS = "-Xmx1024m";
/** Root Logging level passed to the MR app master.*/
public static final String MR_AM_LOG_LEVEL =
@@ -366,6 +373,13 @@ public interface MRJobConfig {
MR_AM_PREFIX + "job.client.thread-count";
public static final int DEFAULT_MR_AM_JOB_CLIENT_THREAD_COUNT = 1;
+ /**
+ * Range of ports that the MapReduce AM can use when binding. Leave blank
+ * if you want all possible ports.
+ */
+ public static final String MR_AM_JOB_CLIENT_PORT_RANGE =
+ MR_AM_PREFIX + "job.client.port-range";
+
/** Enable blacklisting of nodes in the job.*/
public static final String MR_AM_JOB_NODE_BLACKLISTING_ENABLE =
MR_AM_PREFIX + "job.node-blacklisting.enable";
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/AbstractCounters.java Thu May 3 02:14:01 2012
@@ -201,7 +201,15 @@ public abstract class AbstractCounters<C
* @return the group
*/
public synchronized G getGroup(String groupName) {
- String newGroupName = filterGroupName(groupName);
+
+ // filterGroupName
+ boolean groupNameInLegacyMap = true;
+ String newGroupName = legacyMap.get(groupName);
+ if (newGroupName == null) {
+ groupNameInLegacyMap = false;
+ newGroupName = Limits.filterGroupName(groupName);
+ }
+
boolean isFGroup = isFrameworkGroup(newGroupName);
G group = isFGroup ? fgroups.get(newGroupName) : groups.get(newGroupName);
if (group == null) {
@@ -212,19 +220,14 @@ public abstract class AbstractCounters<C
limits.checkGroups(groups.size() + 1);
groups.put(newGroupName, group);
}
+ if (groupNameInLegacyMap) {
+ LOG.warn("Group " + groupName + " is deprecated. Use " + newGroupName
+ + " instead");
+ }
}
return group;
}
- private String filterGroupName(String oldName) {
- String newName = legacyMap.get(oldName);
- if (newName == null) {
- return Limits.filterGroupName(oldName);
- }
- LOG.warn("Group "+ oldName +" is deprecated. Use "+ newName +" instead");
- return newName;
- }
-
/**
* Returns the total number of counters, by summing the number of counters
* in each group.
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FileSystemCounterGroup.java Thu May 3 02:14:01 2012
@@ -61,7 +61,7 @@ public abstract class FileSystemCounterG
private static final Joiner DISP_JOINER = Joiner.on(": ");
@InterfaceAudience.Private
- public class FSCounter extends AbstractCounter {
+ public static class FSCounter extends AbstractCounter {
final String scheme;
final FileSystemCounter key;
private long value;
@@ -139,8 +139,7 @@ public abstract class FileSystemCounterG
@Override
public void addCounter(C counter) {
C ours;
- if (counter instanceof FileSystemCounterGroup<?>.FSCounter) {
- @SuppressWarnings("unchecked")
+ if (counter instanceof FileSystemCounterGroup.FSCounter) {
FSCounter c = (FSCounter) counter;
ours = findCounter(c.scheme, c.key);
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/counters/FrameworkCounterGroup.java Thu May 3 02:14:01 2012
@@ -57,12 +57,14 @@ public abstract class FrameworkCounterGr
* Use old (which extends new) interface to make compatibility easier.
*/
@InterfaceAudience.Private
- public class FrameworkCounter extends AbstractCounter {
+ public static class FrameworkCounter<T extends Enum<T>> extends AbstractCounter {
final T key;
+ final String groupName;
private long value;
- public FrameworkCounter(T ref) {
+ public FrameworkCounter(T ref, String groupName) {
key = ref;
+ this.groupName = groupName;
}
@Override
@@ -72,7 +74,7 @@ public abstract class FrameworkCounterGr
@Override
public String getDisplayName() {
- return localizeCounterName(getName());
+ return ResourceBundles.getCounterName(groupName, getName(), getName());
}
@Override
@@ -131,10 +133,6 @@ public abstract class FrameworkCounterGr
this.displayName = displayName;
}
- private String localizeCounterName(String counterName) {
- return ResourceBundles.getCounterName(getName(), counterName, counterName);
- }
-
private T valueOf(String name) {
return Enum.valueOf(enumClass, name);
}
@@ -204,7 +202,7 @@ public abstract class FrameworkCounterGr
if (checkNotNull(other, "other counter group")
instanceof FrameworkCounterGroup<?, ?>) {
for (Counter counter : other) {
- findCounter(((FrameworkCounter) counter).key)
+ findCounter(((FrameworkCounter) counter).key.name())
.increment(counter.getValue());
}
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml Thu May 3 02:14:01 2012
@@ -1237,6 +1237,14 @@
</property>
<property>
+ <name>yarn.app.mapreduce.am.job.client.port-range</name>
+ <value></value>
+ <description>Range of ports that the MapReduce AM can use when binding.
+ Leave blank if you want all possible ports.
+ For example 50000-50050,50100-50200</description>
+</property>
+
+<property>
<name>yarn.app.mapreduce.am.scheduler.heartbeat.interval-ms</name>
<value>1000</value>
<description>The interval in ms at which the MR AppMaster should send
@@ -1250,6 +1258,19 @@
to the RM to fetch Application Status.</description>
</property>
+<property>
+ <name>yarn.app.mapreduce.client.max-retries</name>
+ <value>3</value>
+ <description>The number of client retries to the RM/HS/AM before
+ throwing exception. This is a layer above the ipc.</description>
+</property>
+
+<property>
+ <name>yarn.app.mapreduce.am.resource.mb</name>
+ <value>1536</value>
+ <description>The amount of memory the MR AppMaster needs.</description>
+</property>
+
<!-- jobhistory properties -->
<property>
Propchange: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml
------------------------------------------------------------------------------
Merged /hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/resources/mapred-default.xml:r1327719-1333290
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Thu May 3 02:14:01 2012
@@ -78,6 +78,7 @@ import org.apache.hadoop.security.UserGr
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.yarn.YarnException;
import org.apache.hadoop.yarn.api.records.DelegationToken;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnRemoteException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
@@ -147,6 +148,18 @@ public class HistoryClientService extend
+ ":" + server.getPort());
LOG.info("Instantiated MRClientService at " + this.bindAddress);
+ if (getConfig().getBoolean(YarnConfiguration.IS_MINI_YARN_CLUSTER, false)) {
+ String resolvedAddress = bindAddress.getHostName() + ":" + bindAddress.getPort();
+ conf.set(JHAdminConfig.MR_HISTORY_ADDRESS, resolvedAddress);
+
+ String hostname = getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
+ hostname = (hostname.contains(":")) ? hostname.substring(0, hostname.indexOf(":")) : hostname;
+ int port = webApp.port();
+ resolvedAddress = hostname + ":" + port;
+ conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, resolvedAddress);
+ }
+
super.start();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTaskPage.java Thu May 3 02:14:01 2012
@@ -89,7 +89,8 @@ public class HsTaskPage extends HsView {
headRow.
th(".id", "Attempt").
th(".state", "State").
- th(".node", "node").
+ th(".node", "Node").
+ th(".logs", "Logs").
th(".tsh", "Start Time");
if(type == TaskType.REDUCE) {
@@ -144,10 +145,11 @@ public class HsTaskPage extends HsView {
_(taid)._().td(ta.getState().toString()).td().a(".nodelink",
"http://"+ nodeHttpAddr,
nodeRackName + "/" + nodeHttpAddr);
- td._(" ").a(".logslink",
- url("logs", nodeIdString, containerIdString, taid, app.getJob()
- .getUserName()), "logs");
td._();
+ row.td().
+ a(".logslink",
+ url("logs", nodeIdString, containerIdString, taid, app.getJob()
+ .getUserName()), "logs")._();
row.td().
br().$title(String.valueOf(attemptStartTime))._().
@@ -196,6 +198,8 @@ public class HsTaskPage extends HsView {
th().input("search_init").$type(InputType.text).
$name("attempt_node").$value("Node")._()._().
th().input("search_init").$type(InputType.text).
+ $name("attempt_node").$value("Logs")._()._().
+ th().input("search_init").$type(InputType.text).
$name("attempt_start_time").$value("Start Time")._()._();
if(type == TaskType.REDUCE) {
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java Thu May 3 02:14:01 2012
@@ -20,6 +20,7 @@ package org.apache.hadoop.mapreduce.v2.h
import java.io.IOException;
+import javax.servlet.http.HttpServletResponse;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@@ -66,6 +67,7 @@ public class HsWebServices {
private final HistoryContext ctx;
private WebApp webapp;
+ private @Context HttpServletResponse response;
@Context
UriInfo uriInfo;
@@ -76,6 +78,11 @@ public class HsWebServices {
this.webapp = webapp;
}
+ private void init() {
+ //clear content type
+ response.setContentType(null);
+ }
+
@GET
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public HistoryInfo get() {
@@ -86,6 +93,7 @@ public class HsWebServices {
@Path("/info")
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public HistoryInfo getHistoryInfo() {
+ init();
return new HistoryInfo();
}
@@ -102,6 +110,7 @@ public class HsWebServices {
@QueryParam("finishedTimeEnd") String finishEnd) {
Long countParam = null;
+ init();
if (count != null && !count.isEmpty()) {
try {
@@ -183,6 +192,7 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobInfo getJob(@PathParam("jobid") String jid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
return new JobInfo(job);
}
@@ -192,6 +202,7 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
AMAttemptsInfo amAttempts = new AMAttemptsInfo();
for (AMInfo amInfo : job.getAMInfos()) {
@@ -208,6 +219,7 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
return new JobCounterInfo(this.ctx, job);
}
@@ -217,6 +229,7 @@ public class HsWebServices {
@Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
public ConfInfo getJobConf(@PathParam("jobid") String jid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
ConfInfo info;
try {
@@ -234,6 +247,7 @@ public class HsWebServices {
public TasksInfo getJobTasks(@PathParam("jobid") String jid,
@QueryParam("type") String type) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
TasksInfo allTasks = new TasksInfo();
for (Task task : job.getTasks().values()) {
@@ -259,6 +273,7 @@ public class HsWebServices {
public TaskInfo getJobTask(@PathParam("jobid") String jid,
@PathParam("taskid") String tid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
return new TaskInfo(task);
@@ -271,6 +286,7 @@ public class HsWebServices {
public JobTaskCounterInfo getSingleTaskCounters(
@PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
TaskId taskID = MRApps.toTaskID(tid);
if (taskID == null) {
@@ -289,6 +305,7 @@ public class HsWebServices {
public TaskAttemptsInfo getJobTaskAttempts(@PathParam("jobid") String jid,
@PathParam("taskid") String tid) {
+ init();
TaskAttemptsInfo attempts = new TaskAttemptsInfo();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
@@ -310,6 +327,7 @@ public class HsWebServices {
public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
@PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
@@ -328,6 +346,7 @@ public class HsWebServices {
@PathParam("jobid") String jid, @PathParam("taskid") String tid,
@PathParam("attemptid") String attId) {
+ init();
Job job = AMWebServices.getJobFromJobIdString(jid, ctx);
Task task = AMWebServices.getTaskFromTaskIdString(tid, job);
TaskAttempt ta = AMWebServices.getTaskAttemptFromTaskAttemptString(attId,
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/test/resources/job_1329348432655_0001_conf.xml Thu May 3 02:14:01 2012
@@ -292,7 +292,7 @@
<property><!--Loaded from job.xml--><name>yarn.resourcemanager.scheduler.address</name><value>0.0.0.0:8030</value></property>
<property><!--Loaded from job.xml--><name>fs.trash.checkpoint.interval</name><value>0</value></property>
<property><!--Loaded from job.xml--><name>s3native.stream-buffer-size</name><value>4096</value></property>
-<property><!--Loaded from job.xml--><name>yarn.scheduler.fifo.minimum-allocation-mb</name><value>1024</value></property>
+<property><!--Loaded from job.xml--><name>yarn.scheduler.minimum-allocation-mb</name><value>128</value></property>
<property><!--Loaded from job.xml--><name>mapreduce.reduce.shuffle.read.timeout</name><value>180000</value></property>
<property><!--Loaded from job.xml--><name>yarn.app.mapreduce.am.command-opts</name><value>-Xmx500m</value></property>
<property><!--Loaded from job.xml--><name>mapreduce.admin.user.env</name><value>LD_LIBRARY_PATH=$HADOOP_COMMON_HOME/lib/native</value></property>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/main/java/org/apache/hadoop/mapred/ClientServiceDelegate.java Thu May 3 02:14:01 2012
@@ -282,7 +282,7 @@ public class ClientServiceDelegate {
}
private synchronized Object invoke(String method, Class argClass,
- Object args) throws YarnRemoteException {
+ Object args) throws IOException {
Method methodOb = null;
try {
methodOb = MRClientProtocol.class.getMethod(method, argClass);
@@ -291,7 +291,11 @@ public class ClientServiceDelegate {
} catch (NoSuchMethodException e) {
throw new YarnException("Method name mismatch", e);
}
- while (true) {
+ int maxRetries = this.conf.getInt(
+ MRJobConfig.MR_CLIENT_MAX_RETRIES,
+ MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES);
+ IOException lastException = null;
+ while (maxRetries > 0) {
try {
return methodOb.invoke(getProxy(), args);
} catch (YarnRemoteException yre) {
@@ -308,13 +312,21 @@ public class ClientServiceDelegate {
" retrying..", e.getTargetException());
// Force reconnection by setting the proxy to null.
realProxy = null;
+ // HS/AMS shut down
+ maxRetries--;
+ lastException = new IOException(e.getMessage());
+
} catch (Exception e) {
LOG.debug("Failed to contact AM/History for job " + jobId
+ " Will retry..", e);
// Force reconnection by setting the proxy to null.
realProxy = null;
+ // RM shutdown
+ maxRetries--;
+ lastException = new IOException(e.getMessage());
}
}
+ throw lastException;
}
public org.apache.hadoop.mapreduce.Counters getJobCounters(JobID arg0) throws IOException,
@@ -364,7 +376,7 @@ public class ClientServiceDelegate {
return result;
}
- public JobStatus getJobStatus(JobID oldJobID) throws YarnRemoteException {
+ public JobStatus getJobStatus(JobID oldJobID) throws IOException {
org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
TypeConverter.toYarn(oldJobID);
GetJobReportRequest request =
@@ -390,7 +402,7 @@ public class ClientServiceDelegate {
}
public org.apache.hadoop.mapreduce.TaskReport[] getTaskReports(JobID oldJobID, TaskType taskType)
- throws YarnRemoteException, YarnRemoteException {
+ throws IOException{
org.apache.hadoop.mapreduce.v2.api.records.JobId jobId =
TypeConverter.toYarn(oldJobID);
GetTaskReportsRequest request =
@@ -407,7 +419,7 @@ public class ClientServiceDelegate {
}
public boolean killTask(TaskAttemptID taskAttemptID, boolean fail)
- throws YarnRemoteException {
+ throws IOException {
org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId attemptID
= TypeConverter.toYarn(taskAttemptID);
if (fail) {
@@ -423,7 +435,7 @@ public class ClientServiceDelegate {
}
public boolean killJob(JobID oldJobID)
- throws YarnRemoteException {
+ throws IOException {
org.apache.hadoop.mapreduce.v2.api.records.JobId jobId
= TypeConverter.toYarn(oldJobID);
KillJobRequest killRequest = recordFactory.newRecordInstance(KillJobRequest.class);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/fs/DistributedFSCheck.java Thu May 3 02:14:01 2012
@@ -128,7 +128,7 @@ public class DistributedFSCheck extends
if (rootStatus.isFile()) {
nrFiles++;
// For a regular file generate <fName,offset> pairs
- long blockSize = fs.getDefaultBlockSize();
+ long blockSize = fs.getDefaultBlockSize(rootFile);
long fileLength = rootStatus.getLen();
for(long offset = 0; offset < fileLength; offset += blockSize)
writer.append(new Text(rootFile.toString()), new LongWritable(offset));
@@ -160,15 +160,16 @@ public class DistributedFSCheck extends
) throws IOException {
// open file
FSDataInputStream in = null;
+ Path p = new Path(name);
try {
- in = fs.open(new Path(name));
+ in = fs.open(p);
} catch(IOException e) {
return name + "@(missing)";
}
in.seek(offset);
long actualSize = 0;
try {
- long blockSize = fs.getDefaultBlockSize();
+ long blockSize = fs.getDefaultBlockSize(p);
reporter.setStatus("reading " + name + "@" +
offset + "/" + blockSize);
for( int curSize = bufferSize;
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/ipc/TestSocketFactory.java Thu May 3 02:14:01 2012
@@ -83,7 +83,9 @@ public class TestSocketFactory {
JobConf jobConf = new JobConf();
FileSystem.setDefaultUri(jobConf, fs.getUri().toString());
miniMRYarnCluster = initAndStartMiniMRYarnCluster(jobConf);
- JobConf jconf = new JobConf(cconf);
+ JobConf jconf = new JobConf(miniMRYarnCluster.getConfig());
+ jconf.set("hadoop.rpc.socket.factory.class.default",
+ "org.apache.hadoop.ipc.DummySocketFactory");
jconf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
String rmAddress = jconf.get("yarn.resourcemanager.address");
String[] split = rmAddress.split(":");
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java Thu May 3 02:14:01 2012
@@ -49,6 +49,7 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.api.records.JobState;
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.FinalApplicationStatus;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
@@ -122,8 +123,7 @@ public class TestClientServiceDelegate {
MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
when(historyServerProxy.getJobReport(getJobReportRequest())).thenThrow(
- new RuntimeException("1")).thenThrow(new RuntimeException("2"))
- .thenThrow(new RuntimeException("3"))
+ new RuntimeException("1")).thenThrow(new RuntimeException("2"))
.thenReturn(getJobReportResponse());
ResourceMgrDelegate rm = mock(ResourceMgrDelegate.class);
@@ -135,7 +135,7 @@ public class TestClientServiceDelegate {
JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
Assert.assertNotNull(jobStatus);
- verify(historyServerProxy, times(4)).getJobReport(
+ verify(historyServerProxy, times(3)).getJobReport(
any(GetJobReportRequest.class));
}
@@ -312,6 +312,74 @@ public class TestClientServiceDelegate {
any(String.class));
}
+ @Test
+ public void testRMDownForJobStatusBeforeGetAMReport() throws IOException {
+ Configuration conf = new YarnConfiguration();
+ testRMDownForJobStatusBeforeGetAMReport(conf,
+ MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES);
+ }
+
+ @Test
+ public void testRMDownForJobStatusBeforeGetAMReportWithRetryTimes()
+ throws IOException {
+ Configuration conf = new YarnConfiguration();
+ conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 2);
+ testRMDownForJobStatusBeforeGetAMReport(conf, conf.getInt(
+ MRJobConfig.MR_CLIENT_MAX_RETRIES,
+ MRJobConfig.DEFAULT_MR_CLIENT_MAX_RETRIES));
+ }
+
+ @Test
+ public void testRMDownRestoreForJobStatusBeforeGetAMReport()
+ throws IOException {
+ Configuration conf = new YarnConfiguration();
+ conf.setInt(MRJobConfig.MR_CLIENT_MAX_RETRIES, 3);
+
+ conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
+ conf.setBoolean(MRJobConfig.JOB_AM_ACCESS_DISABLED,
+ !isAMReachableFromClient);
+ MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
+ when(historyServerProxy.getJobReport(any(GetJobReportRequest.class)))
+ .thenReturn(getJobReportResponse());
+ ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
+ when(rmDelegate.getApplicationReport(jobId.getAppId())).thenThrow(
+ new java.lang.reflect.UndeclaredThrowableException(new IOException(
+ "Connection refuced1"))).thenThrow(
+ new java.lang.reflect.UndeclaredThrowableException(new IOException(
+ "Connection refuced2"))).thenReturn(getFinishedApplicationReport());
+ ClientServiceDelegate clientServiceDelegate = new ClientServiceDelegate(
+ conf, rmDelegate, oldJobId, historyServerProxy);
+ JobStatus jobStatus = clientServiceDelegate.getJobStatus(oldJobId);
+ verify(rmDelegate, times(3)).getApplicationReport(any(ApplicationId.class));
+ Assert.assertNotNull(jobStatus);
+ }
+
+ private void testRMDownForJobStatusBeforeGetAMReport(Configuration conf,
+ int noOfRetries) throws YarnRemoteException {
+ conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
+ conf.setBoolean(MRJobConfig.JOB_AM_ACCESS_DISABLED,
+ !isAMReachableFromClient);
+ MRClientProtocol historyServerProxy = mock(MRClientProtocol.class);
+ ResourceMgrDelegate rmDelegate = mock(ResourceMgrDelegate.class);
+ when(rmDelegate.getApplicationReport(jobId.getAppId())).thenThrow(
+ new java.lang.reflect.UndeclaredThrowableException(new IOException(
+ "Connection refuced1"))).thenThrow(
+ new java.lang.reflect.UndeclaredThrowableException(new IOException(
+ "Connection refuced2"))).thenThrow(
+ new java.lang.reflect.UndeclaredThrowableException(new IOException(
+ "Connection refuced3")));
+ ClientServiceDelegate clientServiceDelegate = new ClientServiceDelegate(
+ conf, rmDelegate, oldJobId, historyServerProxy);
+ try {
+ clientServiceDelegate.getJobStatus(oldJobId);
+ Assert.fail("It should throw exception after retries");
+ } catch (IOException e) {
+ System.out.println("fail to get job status,and e=" + e.toString());
+ }
+ verify(rmDelegate, times(noOfRetries)).getApplicationReport(
+ any(ApplicationId.class));
+ }
+
private GetJobReportRequest getJobReportRequest() {
GetJobReportRequest request = Records.newRecord(GetJobReportRequest.class);
request.setJobId(jobId);
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMiniMRClasspath.java Thu May 3 02:14:01 2012
@@ -173,7 +173,7 @@ public class TestMiniMRClasspath {
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(taskTrackers, namenode, 3);
- JobConf jobConf = new JobConf();
+ JobConf jobConf = mr.createJobConf();
String result;
result = launchWordCount(fileSys.getUri(), jobConf,
"The quick brown fox\nhas many silly\n" + "red fox sox\n", 3, 1);
@@ -205,7 +205,7 @@ public class TestMiniMRClasspath {
fileSys = dfs.getFileSystem();
namenode = fileSys.getUri().toString();
mr = new MiniMRCluster(taskTrackers, namenode, 3);
- JobConf jobConf = new JobConf();
+ JobConf jobConf = mr.createJobConf();
String result;
result = launchExternal(fileSys.getUri(), jobConf,
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestYarnClientProtocolProvider.java Thu May 3 02:14:01 2012
@@ -28,6 +28,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.LocalJobRunner;
import org.apache.hadoop.mapred.ResourceMgrDelegate;
import org.apache.hadoop.mapred.YARNRunner;
import org.apache.hadoop.mapreduce.protocol.ClientProtocol;
@@ -54,17 +55,26 @@ public class TestYarnClientProtocolProvi
try {
cluster = new Cluster(conf);
- fail("Cluster should not be initialized with out any framework name");
- } catch (IOException e) {
-
+ } catch (Exception e) {
+ throw new Exception(
+ "Failed to initialize a local runner w/o a cluster framework key", e);
}
-
+
+ try {
+ assertTrue("client is not a LocalJobRunner",
+ cluster.getClient() instanceof LocalJobRunner);
+ } finally {
+ if (cluster != null) {
+ cluster.close();
+ }
+ }
+
try {
conf = new Configuration();
conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.YARN_FRAMEWORK_NAME);
cluster = new Cluster(conf);
ClientProtocol client = cluster.getClient();
- assertTrue(client instanceof YARNRunner);
+ assertTrue("client is a YARNRunner", client instanceof YARNRunner);
} catch (IOException e) {
} finally {
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java Thu May 3 02:14:01 2012
@@ -20,6 +20,8 @@ package org.apache.hadoop.mapreduce.v2;
import java.io.File;
import java.io.IOException;
+import java.net.InetAddress;
+import java.net.UnknownHostException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
@@ -126,6 +128,10 @@ public class MiniMRYarnCluster extends M
@Override
public synchronized void start() {
try {
+ getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
+ getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ MiniYARNCluster.getHostname() + ":0");
historyServer = new JobHistoryServer();
historyServer.init(getConfig());
new Thread() {
@@ -145,6 +151,20 @@ public class MiniMRYarnCluster extends M
} catch (Throwable t) {
throw new YarnException(t);
}
+ //need to do this because historyServer.init creates a new Configuration
+ getConfig().set(JHAdminConfig.MR_HISTORY_ADDRESS,
+ historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
+ getConfig().set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
+ historyServer.getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
+
+ LOG.info("MiniMRYARN ResourceManager address: " +
+ getConfig().get(YarnConfiguration.RM_ADDRESS));
+ LOG.info("MiniMRYARN ResourceManager web address: " +
+ getConfig().get(YarnConfiguration.RM_WEBAPP_ADDRESS));
+ LOG.info("MiniMRYARN HistoryServer address: " +
+ getConfig().get(JHAdminConfig.MR_HISTORY_ADDRESS));
+ LOG.info("MiniMRYARN HistoryServer web address: " +
+ getConfig().get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS));
}
@Override
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Thu May 3 02:14:01 2012
@@ -124,15 +124,12 @@
<dependency>
<groupId>junit</groupId>
<artifactId>junit</artifactId>
+ <scope>test</scope>
</dependency>
<dependency>
<groupId>org.jboss.netty</groupId>
<artifactId>netty</artifactId>
</dependency>
- <dependency>
- <groupId>com.cenqua.clover</groupId>
- <artifactId>clover</artifactId>
- </dependency>
</dependencies>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/main/java/org/apache/hadoop/yarn/applications/distributedshell/Client.java Thu May 3 02:14:01 2012
@@ -197,13 +197,19 @@ public class Client {
/**
*/
- public Client() throws Exception {
+ public Client(Configuration conf) throws Exception {
// Set up the configuration and RPC
- conf = new Configuration();
+ this.conf = conf;
rpc = YarnRPC.create(conf);
}
/**
+ */
+ public Client() throws Exception {
+ this(new Configuration());
+ }
+
+ /**
* Helper function to print out usage
* @param opts Parsed command line options
*/
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-applications/hadoop-yarn-applications-distributedshell/src/test/java/org/apache/hadoop/yarn/applications/distributedshell/TestDistributedShell.java Thu May 3 02:14:01 2012
@@ -18,12 +18,19 @@
package org.apache.hadoop.yarn.applications.distributedshell;
+import java.io.File;
+import java.io.FileOutputStream;
import java.io.IOException;
+import java.io.OutputStream;
+import java.net.URL;
+import junit.framework.Assert;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.JarFinder;
+import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.MiniYARNCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;
@@ -42,12 +49,20 @@ public class TestDistributedShell {
@BeforeClass
public static void setup() throws InterruptedException, IOException {
LOG.info("Starting up YARN cluster");
- conf.setInt("yarn.scheduler.fifo.minimum-allocation-mb", 128);
+ conf.setInt(YarnConfiguration.RM_SCHEDULER_MINIMUM_ALLOCATION_MB, 128);
if (yarnCluster == null) {
yarnCluster = new MiniYARNCluster(TestDistributedShell.class.getName(),
1, 1, 1);
yarnCluster.init(conf);
yarnCluster.start();
+ URL url = Thread.currentThread().getContextClassLoader().getResource("yarn-site.xml");
+ if (url == null) {
+ throw new RuntimeException("Could not find 'yarn-site.xml' dummy file in classpath");
+ }
+ yarnCluster.getConfig().set("yarn.application.classpath", new File(url.getPath()).getParent());
+ OutputStream os = new FileOutputStream(new File(url.getPath()));
+ yarnCluster.getConfig().writeXml(os);
+ os.close();
}
try {
Thread.sleep(2000);
@@ -81,14 +96,14 @@ public class TestDistributedShell {
};
LOG.info("Initializing DS Client");
- Client client = new Client();
+ Client client = new Client(new Configuration(yarnCluster.getConfig()));
boolean initSuccess = client.init(args);
- assert(initSuccess);
+ Assert.assertTrue(initSuccess);
LOG.info("Running DS Client");
boolean result = client.run();
LOG.info("Client run completed. Result=" + result);
- assert (result == true);
+ Assert.assertTrue(result);
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/pom.xml Thu May 3 02:14:01 2012
@@ -78,6 +78,7 @@
<configuration>
<tasks>
<copy file="src/main/resources/yarn-default.xml" todir="src/site/resources"/>
+ <copy file="src/main/xsl/configuration.xsl" todir="src/site/resources"/>
</tasks>
</configuration>
</execution>
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java Thu May 3 02:14:01 2012
@@ -107,7 +107,17 @@ public class YarnConfiguration extends C
public static final int DEFAULT_RM_SCHEDULER_PORT = 8030;
public static final String DEFAULT_RM_SCHEDULER_ADDRESS = "0.0.0.0:" +
DEFAULT_RM_SCHEDULER_PORT;
-
+
+  /** Minimum memory request grant-able by the RM scheduler. */
+ public static final String RM_SCHEDULER_MINIMUM_ALLOCATION_MB =
+ YARN_PREFIX + "scheduler.minimum-allocation-mb";
+ public static final int DEFAULT_RM_SCHEDULER_MINIMUM_ALLOCATION_MB = 128;
+
+ /** Maximum memory request grant-able by the RM scheduler. */
+ public static final String RM_SCHEDULER_MAXIMUM_ALLOCATION_MB =
+ YARN_PREFIX + "scheduler.maximum-allocation-mb";
+ public static final int DEFAULT_RM_SCHEDULER_MAXIMUM_ALLOCATION_MB = 10240;
+
/** Number of threads to handle scheduler interface.*/
public static final String RM_SCHEDULER_CLIENT_THREAD_COUNT =
RM_PREFIX + "scheduler.client.thread-count";
@@ -538,6 +548,8 @@ public class YarnConfiguration extends C
/** Container temp directory */
public static final String DEFAULT_CONTAINER_TEMP_DIR = "./tmp";
+ public static final String IS_MINI_YARN_CLUSTER = YARN_PREFIX + ".is.minicluster";
+
public YarnConfiguration() {
super();
}
Modified: hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/RpcServerFactory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/RpcServerFactory.java?rev=1333291&r1=1333290&r2=1333291&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/RpcServerFactory.java (original)
+++ hadoop/common/branches/HDFS-3042/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/factories/RpcServerFactory.java Thu May 3 02:14:01 2012
@@ -31,6 +31,6 @@ public interface RpcServerFactory {
public Server getServer(Class<?> protocol, Object instance,
InetSocketAddress addr, Configuration conf,
SecretManager<? extends TokenIdentifier> secretManager,
- int numHandlers)
+ int numHandlers, String portRangeConfig)
throws YarnException;
}
\ No newline at end of file