You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by to...@apache.org on 2011/12/15 00:34:10 UTC
svn commit: r1214546 [3/5] - in
/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project: ./ conf/
hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapred/
hadoop-mapreduce-client/hadoop-map...
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java Wed Dec 14 23:34:04 2011
@@ -34,6 +34,9 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
import org.apache.hadoop.security.authorize.AccessControlList;
@@ -56,19 +59,6 @@ import static org.apache.hadoop.yarn.web
public class HsJobBlock extends HtmlBlock {
final AppContext appContext;
- int killedMapAttempts = 0;
- int failedMapAttempts = 0;
- int successfulMapAttempts = 0;
- int killedReduceAttempts = 0;
- int failedReduceAttempts = 0;
- int successfulReduceAttempts = 0;
- long avgMapTime = 0;
- long avgReduceTime = 0;
- long avgShuffleTime = 0;
- long avgSortTime = 0;
- int numMaps;
- int numReduces;
-
@Inject HsJobBlock(AppContext appctx) {
appContext = appctx;
}
@@ -85,37 +75,30 @@ public class HsJobBlock extends HtmlBloc
return;
}
JobId jobID = MRApps.toJobID(jid);
- Job job = appContext.getJob(jobID);
- if (job == null) {
+ Job j = appContext.getJob(jobID);
+ if (j == null) {
html.
p()._("Sorry, ", jid, " not found.")._();
return;
}
- Map<JobACL, AccessControlList> acls = job.getJobACLs();
- List<AMInfo> amInfos = job.getAMInfos();
- JobReport jobReport = job.getReport();
- int mapTasks = job.getTotalMaps();
- int mapTasksComplete = job.getCompletedMaps();
- int reduceTasks = job.getTotalReduces();
- int reducesTasksComplete = job.getCompletedReduces();
- long startTime = jobReport.getStartTime();
- long finishTime = jobReport.getFinishTime();
- countTasksAndAttempts(job);
+ List<AMInfo> amInfos = j.getAMInfos();
+ JobInfo job = new JobInfo(j);
ResponseInfo infoBlock = info("Job Overview").
_("Job Name:", job.getName()).
_("User Name:", job.getUserName()).
_("Queue:", job.getQueueName()).
_("State:", job.getState()).
_("Uberized:", job.isUber()).
- _("Started:", new Date(startTime)).
- _("Finished:", new Date(finishTime)).
+ _("Started:", new Date(job.getStartTime())).
+ _("Finished:", new Date(job.getFinishTime())).
_("Elapsed:", StringUtils.formatTime(
- Times.elapsed(startTime, finishTime, false)));
+ Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
String amString =
amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
- List<String> diagnostics = job.getDiagnostics();
+ // todo - switch to use JobInfo
+ List<String> diagnostics = j.getDiagnostics();
if(diagnostics != null && !diagnostics.isEmpty()) {
StringBuffer b = new StringBuffer();
for(String diag: diagnostics) {
@@ -124,18 +107,17 @@ public class HsJobBlock extends HtmlBloc
infoBlock._("Diagnostics:", b.toString());
}
- if(numMaps > 0) {
- infoBlock._("Average Map Time", StringUtils.formatTime(avgMapTime));
+ if(job.getNumMaps() > 0) {
+ infoBlock._("Average Map Time", StringUtils.formatTime(job.getAvgMapTime()));
}
- if(numReduces > 0) {
- infoBlock._("Average Reduce Time", StringUtils.formatTime(avgReduceTime));
- infoBlock._("Average Shuffle Time", StringUtils.formatTime(avgShuffleTime));
- infoBlock._("Average Merge Time", StringUtils.formatTime(avgSortTime));
+ if(job.getNumReduces() > 0) {
+ infoBlock._("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime()));
+ infoBlock._("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime()));
+ infoBlock._("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime()));
}
- for(Map.Entry<JobACL, AccessControlList> entry : acls.entrySet()) {
- infoBlock._("ACL "+entry.getKey().getAclName()+":",
- entry.getValue().getAclString());
+ for (ConfEntryInfo entry : job.getAcls()) {
+ infoBlock._("ACL "+entry.getName()+":", entry.getValue());
}
DIV<Hamlet> div = html.
_(InfoBlock.class).
@@ -154,18 +136,14 @@ public class HsJobBlock extends HtmlBloc
th(_TH, "Logs").
_();
for (AMInfo amInfo : amInfos) {
- String nodeHttpAddress = amInfo.getNodeManagerHost() +
- ":" + amInfo.getNodeManagerHttpPort();
- NodeId nodeId = BuilderUtils.newNodeId(
- amInfo.getNodeManagerHost(), amInfo.getNodeManagerPort());
-
+ AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
+ job.getId(), job.getUserName(), "", "");
table.tr().
- td(String.valueOf(amInfo.getAppAttemptId().getAttemptId())).
- td(new Date(amInfo.getStartTime()).toString()).
- td().a(".nodelink", url("http://", nodeHttpAddress),
- nodeHttpAddress)._().
- td().a(".logslink", url("logs", nodeId.toString(),
- amInfo.getContainerId().toString(), jid, job.getUserName()),
+ td(String.valueOf(attempt.getAttemptId())).
+ td(new Date(attempt.getStartTime()).toString()).
+ td().a(".nodelink", url("http://", attempt.getNodeHttpAddress()),
+ attempt.getNodeHttpAddress())._().
+ td().a(".logslink", url(attempt.getShortLogsLink()),
"logs")._().
_();
}
@@ -184,13 +162,13 @@ public class HsJobBlock extends HtmlBloc
tr(_ODD).
th().
a(url("tasks", jid, "m"), "Map")._().
- td(String.valueOf(mapTasks)).
- td(String.valueOf(mapTasksComplete))._().
+ td(String.valueOf(String.valueOf(job.getMapsTotal()))).
+ td(String.valueOf(String.valueOf(job.getMapsCompleted())))._().
tr(_EVEN).
th().
a(url("tasks", jid, "r"), "Reduce")._().
- td(String.valueOf(reduceTasks)).
- td(String.valueOf(reducesTasksComplete))._()
+ td(String.valueOf(String.valueOf(job.getReducesTotal()))).
+ td(String.valueOf(String.valueOf(job.getReducesCompleted())))._()
._().
// Attempts table
@@ -204,99 +182,27 @@ public class HsJobBlock extends HtmlBloc
th("Maps").
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.FAILED.toString()),
- String.valueOf(failedMapAttempts))._().
+ String.valueOf(job.getFailedMapAttempts()))._().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.KILLED.toString()),
- String.valueOf(killedMapAttempts))._().
+ String.valueOf(job.getKilledMapAttempts()))._().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.SUCCESSFUL.toString()),
- String.valueOf(successfulMapAttempts))._().
+ String.valueOf(job.getSuccessfulMapAttempts()))._().
_().
tr(_EVEN).
th("Reduces").
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.FAILED.toString()),
- String.valueOf(failedReduceAttempts))._().
+ String.valueOf(job.getFailedReduceAttempts()))._().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.KILLED.toString()),
- String.valueOf(killedReduceAttempts))._().
+ String.valueOf(job.getKilledReduceAttempts()))._().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.SUCCESSFUL.toString()),
- String.valueOf(successfulReduceAttempts))._().
+ String.valueOf(job.getSuccessfulReduceAttempts()))._().
_().
_().
_();
}
-
- /**
- * Go through a job and update the member variables with counts for
- * information to output in the page.
- * @param job the job to get counts for.
- */
- private void countTasksAndAttempts(Job job) {
- numReduces = 0;
- numMaps = 0;
- Map<TaskId, Task> tasks = job.getTasks();
- for (Task task : tasks.values()) {
- // Attempts counts
- Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
- for (TaskAttempt attempt : attempts.values()) {
-
- int successful = 0, failed = 0, killed =0;
-
- if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
- //Do Nothing
- } else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt
- .getState())) {
- //Do Nothing
- } else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
- .getState())) {
- ++successful;
- } else if (TaskAttemptStateUI.FAILED
- .correspondsTo(attempt.getState())) {
- ++failed;
- } else if (TaskAttemptStateUI.KILLED
- .correspondsTo(attempt.getState())) {
- ++killed;
- }
-
- switch (task.getType()) {
- case MAP:
- successfulMapAttempts += successful;
- failedMapAttempts += failed;
- killedMapAttempts += killed;
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- numMaps++;
- avgMapTime += (attempt.getFinishTime() -
- attempt.getLaunchTime());
- }
- break;
- case REDUCE:
- successfulReduceAttempts += successful;
- failedReduceAttempts += failed;
- killedReduceAttempts += killed;
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- numReduces++;
- avgShuffleTime += (attempt.getShuffleFinishTime() -
- attempt.getLaunchTime());
- avgSortTime += attempt.getSortFinishTime() -
- attempt.getLaunchTime();
- avgReduceTime += (attempt.getFinishTime() -
- attempt.getShuffleFinishTime());
- }
- break;
- }
- }
- }
-
- if(numMaps > 0) {
- avgMapTime = avgMapTime / numMaps;
- }
-
- if(numReduces > 0) {
- avgReduceTime = avgReduceTime / numReduces;
- avgShuffleTime = avgShuffleTime / numReduces;
- avgSortTime = avgSortTime / numReduces;
- }
- }
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java Wed Dec 14 23:34:04 2011
@@ -21,10 +21,9 @@ package org.apache.hadoop.mapreduce.v2.h
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -38,8 +37,8 @@ import com.google.inject.Inject;
*/
public class HsJobsBlock extends HtmlBlock {
final AppContext appContext;
- static final SimpleDateFormat dateFormat =
- new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
+ static final SimpleDateFormat dateFormat =
+ new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
@Inject HsJobsBlock(AppContext appCtx) {
appContext = appCtx;
@@ -68,28 +67,21 @@ public class HsJobsBlock extends HtmlBlo
th("Reduces Completed")._()._().
tbody();
LOG.info("Getting list of all Jobs.");
- for (Job job : appContext.getAllJobs().values()) {
- String jobID = MRApps.toString(job.getID());
- JobReport report = job.getReport();
- String mapsTotal = String.valueOf(job.getTotalMaps());
- String mapsCompleted = String.valueOf(job.getCompletedMaps());
- String reduceTotal = String.valueOf(job.getTotalReduces());
- String reduceCompleted = String.valueOf(job.getCompletedReduces());
- long startTime = report.getStartTime();
- long finishTime = report.getFinishTime();
+ for (Job j : appContext.getAllJobs().values()) {
+ JobInfo job = new JobInfo(j);
tbody.
tr().
- td(dateFormat.format(new Date(startTime))).
- td(dateFormat.format(new Date(finishTime))).
- td().a(url("job", jobID), jobID)._().
- td(job.getName().toString()).
+ td(dateFormat.format(new Date(job.getStartTime()))).
+ td(dateFormat.format(new Date(job.getFinishTime()))).
+ td().a(url("job", job.getId()), job.getId())._().
+ td(job.getName()).
td(job.getUserName()).
td(job.getQueueName()).
- td(job.getState().toString()).
- td(mapsTotal).
- td(mapsCompleted).
- td(reduceTotal).
- td(reduceCompleted)._();
+ td(job.getState()).
+ td(String.valueOf(job.getMapsTotal())).
+ td(String.valueOf(job.getMapsCompleted())).
+ td(String.valueOf(job.getReducesTotal())).
+ td(String.valueOf(job.getReducesCompleted()))._();
}
tbody._().
tfoot().
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Wed Dec 14 23:34:04 2011
@@ -20,12 +20,13 @@ package org.apache.hadoop.mapreduce.v2.h
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.util.Times;
@@ -65,7 +66,7 @@ public class HsTasksBlock extends HtmlBl
if (!symbol.isEmpty()) {
type = MRApps.taskType(symbol);
}
-
+
THEAD<TABLE<Hamlet>> thead = html.table("#tasks").thead();
//Create the spanning row
int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
@@ -74,7 +75,7 @@ public class HsTasksBlock extends HtmlBl
th().$colspan(attemptColSpan).$class("ui-state-default").
_("Successful Attempt")._().
_();
-
+
TR<THEAD<TABLE<Hamlet>>> theadRow = thead.
tr().
th("Name").
@@ -83,33 +84,33 @@ public class HsTasksBlock extends HtmlBl
th("Finish Time").
th("Elapsed Time").
th("Start Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Shuffle Finish Time"); //Attempt
theadRow.th("Merge Finish Time"); //Attempt
}
-
+
theadRow.th("Finish Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Elapsed Time Shuffle"); //Attempt
theadRow.th("Elapsed Time Merge"); //Attempt
theadRow.th("Elapsed Time Reduce"); //Attempt
}
theadRow.th("Elapsed Time"); //Attempt
-
+
TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
}
- String tid = MRApps.toString(task.getID());
-
- TaskReport report = task.getReport();
- long startTime = report.getStartTime();
- long finishTime = report.getFinishTime();
- long elapsed = Times.elapsed(startTime, finishTime, false);
-
+ TaskInfo info = new TaskInfo(task);
+ String tid = info.getId();
+
+ long startTime = info.getStartTime();
+ long finishTime = info.getFinishTime();
+ long elapsed = info.getElapsedTime();
+
long attemptStartTime = -1;
long shuffleFinishTime = -1;
long sortFinishTime = -1;
@@ -118,30 +119,31 @@ public class HsTasksBlock extends HtmlBl
long elapsedSortTime = -1;;
long elapsedReduceTime = -1;
long attemptElapsed = -1;
- TaskAttempt successful = getSuccessfulAttempt(task);
+ TaskAttempt successful = info.getSuccessful();
if(successful != null) {
- attemptStartTime = successful.getLaunchTime();
- attemptFinishTime = successful.getFinishTime();
+ TaskAttemptInfo ta;
if(type == TaskType.REDUCE) {
- shuffleFinishTime = successful.getShuffleFinishTime();
- sortFinishTime = successful.getSortFinishTime();
- elapsedShuffleTime =
- Times.elapsed(attemptStartTime, shuffleFinishTime, false);
- elapsedSortTime =
- Times.elapsed(shuffleFinishTime, sortFinishTime, false);
- elapsedReduceTime =
- Times.elapsed(sortFinishTime, attemptFinishTime, false);
+ ReduceTaskAttemptInfo rta = new ReduceTaskAttemptInfo(successful, type);
+ shuffleFinishTime = rta.getShuffleFinishTime();
+ sortFinishTime = rta.getMergeFinishTime();
+ elapsedShuffleTime = rta.getElapsedShuffleTime();
+ elapsedSortTime = rta.getElapsedMergeTime();
+ elapsedReduceTime = rta.getElapsedReduceTime();
+ ta = rta;
+ } else {
+ ta = new TaskAttemptInfo(successful, type, false);
}
- attemptElapsed =
- Times.elapsed(attemptStartTime, attemptFinishTime, false);
+ attemptStartTime = ta.getStartTime();
+ attemptFinishTime = ta.getFinishTime();
+ attemptElapsed = ta.getElapsedTime();
}
-
+
TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
row.
td().
- br().$title(String.valueOf(task.getID().getId()))._(). // sorting
+ br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
a(url("task", tid), tid)._().
- td(report.getTaskState().toString()).
+ td(info.getState()).
td().
br().$title(String.valueOf(startTime))._().
_(Times.format(startTime))._().
@@ -166,7 +168,7 @@ public class HsTasksBlock extends HtmlBl
td().
br().$title(String.valueOf(attemptFinishTime))._().
_(Times.format(attemptFinishTime))._();
-
+
if(type == TaskType.REDUCE) {
row.td().
br().$title(String.valueOf(elapsedShuffleTime))._().
@@ -178,7 +180,7 @@ public class HsTasksBlock extends HtmlBl
br().$title(String.valueOf(elapsedReduceTime))._().
_(formatTime(elapsedReduceTime))._();
}
-
+
row.td().
br().$title(String.valueOf(attemptElapsed))._().
_(formatTime(attemptElapsed))._();
@@ -194,7 +196,7 @@ public class HsTasksBlock extends HtmlBl
.$type(InputType.text).$name("elapsed_time").$value("Elapsed Time")._()
._().th().input("search_init").$type(InputType.text)
.$name("attempt_start_time").$value("Start Time")._()._();
-
+
if(type == TaskType.REDUCE) {
footRow.th().input("search_init").$type(InputType.text)
.$name("shuffle_time").$value("Shuffle Time")._()._();
@@ -216,20 +218,12 @@ public class HsTasksBlock extends HtmlBl
footRow.th().input("search_init").$type(InputType.text)
.$name("attempt_elapsed").$value("Elapsed Time")._()._();
-
+
footRow._()._()._();
}
private String formatTime(long elapsed) {
return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
}
-
- private TaskAttempt getSuccessfulAttempt(Task task) {
- for(TaskAttempt attempt: task.getAttempts().values()) {
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- return attempt;
- }
- }
- return null;
- }
+
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java Wed Dec 14 23:34:04 2011
@@ -27,6 +27,7 @@ import static org.apache.hadoop.yarn.web
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
public class HsWebApp extends WebApp implements AMParams {
@@ -39,6 +40,9 @@ public class HsWebApp extends WebApp imp
@Override
public void setup() {
+ bind(HsWebServices.class);
+ bind(JAXBContextResolver.class);
+ bind(GenericExceptionHandler.class);
bind(AppContext.class).toInstance(history);
route("/", HsController.class);
route("/app", HsController.class);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClientServiceDelegate.java Wed Dec 14 23:34:04 2011
@@ -168,7 +168,7 @@ public class TestClientServiceDelegate {
GetJobReportResponse jobReportResponse1 = mock(GetJobReportResponse.class);
when(jobReportResponse1.getJobReport()).thenReturn(
MRBuilderUtils.newJobReport(jobId, "jobName-firstGen", "user",
- JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "anything", null));
+ JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "anything", null, false));
// First AM returns a report with jobName firstGen and simulates AM shutdown
// on second invocation.
@@ -180,7 +180,7 @@ public class TestClientServiceDelegate {
GetJobReportResponse jobReportResponse2 = mock(GetJobReportResponse.class);
when(jobReportResponse2.getJobReport()).thenReturn(
MRBuilderUtils.newJobReport(jobId, "jobName-secondGen", "user",
- JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "anything", null));
+ JobState.RUNNING, 0, 0, 0, 0, 0, 0, 0, "anything", null, false));
// Second AM generation returns a report with jobName secondGen
MRClientProtocol secondGenAMProxy = mock(MRClientProtocol.class);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestUberAM.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestUberAM.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestUberAM.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestUberAM.java Wed Dec 14 23:34:04 2011
@@ -49,6 +49,7 @@ public class TestUberAM extends TestMRJo
}
@Override
+ @Test
public void testSleepJob()
throws IOException, InterruptedException, ClassNotFoundException {
if (mrCluster != null) {
@@ -84,6 +85,7 @@ public class TestUberAM extends TestMRJo
}
@Override
+ @Test
public void testRandomWriter()
throws IOException, InterruptedException, ClassNotFoundException {
super.testRandomWriter();
@@ -101,6 +103,7 @@ public class TestUberAM extends TestMRJo
}
@Override
+ @Test
public void testFailingMapper()
throws IOException, InterruptedException, ClassNotFoundException {
LOG.info("\n\n\nStarting uberized testFailingMapper().");
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Wed Dec 14 23:34:04 2011
@@ -238,7 +238,7 @@
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
- <version>2.0</version>
+ <version>3.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-api/src/main/java/org/apache/hadoop/yarn/api/ApplicationConstants.java Wed Dec 14 23:34:04 2011
@@ -24,7 +24,6 @@ import org.apache.hadoop.security.UserGr
* This is the API for the applications comprising of constants that YARN sets
* up for the applications and the containers.
*
- * TODO: Should also be defined in avro/pb IDLs
* TODO: Investigate the semantics and security of each cross-boundary refs.
*/
public interface ApplicationConstants {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/conf/YarnConfiguration.java Wed Dec 14 23:34:04 2011
@@ -91,12 +91,7 @@ public class YarnConfiguration extends C
public static final String RM_CLIENT_THREAD_COUNT =
RM_PREFIX + "client.thread-count";
public static final int DEFAULT_RM_CLIENT_THREAD_COUNT = 10;
-
- /** The expiry interval for application master reporting.*/
- public static final String RM_AM_EXPIRY_INTERVAL_MS =
- RM_PREFIX + "am.liveness-monitor.expiry-interval-ms";
- public static final int DEFAULT_RM_AM_EXPIRY_INTERVAL_MS = 600000;
-
+
/** The Kerberos principal for the resource manager.*/
public static final String RM_PRINCIPAL =
RM_PREFIX + "principal";
@@ -126,7 +121,17 @@ public class YarnConfiguration extends C
public static final int DEFAULT_RM_RESOURCE_TRACKER_PORT = 8025;
public static final String DEFAULT_RM_RESOURCE_TRACKER_ADDRESS =
"0.0.0.0:" + DEFAULT_RM_RESOURCE_TRACKER_PORT;
-
+
+ /** The expiry interval for application master reporting.*/
+ public static final String RM_AM_EXPIRY_INTERVAL_MS =
+ YARN_PREFIX + "am.liveness-monitor.expiry-interval-ms";
+ public static final int DEFAULT_RM_AM_EXPIRY_INTERVAL_MS = 600000;
+
+ /** How long to wait until a node manager is considered dead.*/
+ public static final String RM_NM_EXPIRY_INTERVAL_MS =
+ YARN_PREFIX + "nm.liveness-monitor.expiry-interval-ms";
+ public static final int DEFAULT_RM_NM_EXPIRY_INTERVAL_MS = 600000;
+
/** Are acls enabled.*/
public static final String YARN_ACL_ENABLE =
YARN_PREFIX + "acl.enable";
@@ -160,12 +165,7 @@ public class YarnConfiguration extends C
/** The keytab for the resource manager.*/
public static final String RM_KEYTAB =
RM_PREFIX + "keytab";
-
- /** How long to wait until a node manager is considered dead.*/
- public static final String RM_NM_EXPIRY_INTERVAL_MS =
- RM_PREFIX + "nm.liveness-monitor.expiry-interval-ms";
- public static final int DEFAULT_RM_NM_EXPIRY_INTERVAL_MS = 600000;
-
+
/** How long to wait until a container is considered dead.*/
public static final String RM_CONTAINER_ALLOC_EXPIRY_INTERVAL_MS =
RM_PREFIX + "rm.container-allocation.expiry-interval-ms";
@@ -293,10 +293,16 @@ public class YarnConfiguration extends C
public static final String NM_LOG_DIRS = NM_PREFIX + "log-dirs";
public static final String DEFAULT_NM_LOG_DIRS = "/tmp/logs";
+ /** Interval at which the delayed token removal thread runs */
+ public static final String RM_DELAYED_DELEGATION_TOKEN_REMOVAL_INTERVAL_MS =
+ RM_PREFIX + "delayed.delegation-token.removal-interval-ms";
+ public static final long DEFAULT_RM_DELAYED_DELEGATION_TOKEN_REMOVAL_INTERVAL_MS =
+ 30000l;
+
/** Whether to enable log aggregation */
- public static final String NM_LOG_AGGREGATION_ENABLED = NM_PREFIX
+ public static final String LOG_AGGREGATION_ENABLED = YARN_PREFIX
+ "log-aggregation-enable";
- public static final boolean DEFAULT_NM_LOG_AGGREGATION_ENABLED = false;
+ public static final boolean DEFAULT_LOG_AGGREGATION_ENABLED = false;
/**
* Number of seconds to retain logs on the NodeManager. Only applicable if Log
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java Wed Dec 14 23:34:04 2011
@@ -18,24 +18,29 @@
package org.apache.hadoop.yarn.webapp;
-import com.google.common.base.CharMatcher;
-import static com.google.common.base.Preconditions.*;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.inject.Provides;
-import com.google.inject.servlet.GuiceFilter;
-import com.google.inject.servlet.ServletModule;
+import static com.google.common.base.Preconditions.checkNotNull;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;
-import org.apache.hadoop.yarn.util.StringHelper;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.CharMatcher;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+import com.google.inject.Provides;
+import com.google.inject.servlet.GuiceFilter;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.core.ResourceConfig;
+import com.sun.jersey.core.util.FeaturesAndProperties;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
/**
* @see WebApps for a usage example
*/
@@ -45,9 +50,10 @@ public abstract class WebApp extends Ser
public enum HTTP { GET, POST, HEAD, PUT, DELETE };
private volatile String name;
- private volatile List<String> servePathSpecs = new ArrayList<String>();
+ private volatile List<String> servePathSpecs = new ArrayList<String>();
// path to redirect to if user goes to "/"
private volatile String redirectPath;
+ private volatile String wsName;
private volatile Configuration conf;
private volatile HttpServer httpServer;
private volatile GuiceFilter guiceFilter;
@@ -104,18 +110,20 @@ public abstract class WebApp extends Ser
void addServePathSpec(String path) { this.servePathSpecs.add(path); }
- public String[] getServePathSpecs() {
+ public String[] getServePathSpecs() {
return this.servePathSpecs.toArray(new String[this.servePathSpecs.size()]);
}
/**
- * Set a path to redirect the user to if they just go to "/". For
- * instance "/" goes to "/yarn/apps". This allows the filters to
+ * Set a path to redirect the user to if they just go to "/". For
+ * instance "/" goes to "/yarn/apps". This allows the filters to
* more easily differentiate the different webapps.
* @param path the path to redirect to
*/
void setRedirectPath(String path) { this.redirectPath = path; }
+ void setWebServices (String name) { this.wsName = name; }
+
public String getRedirectPath() { return this.redirectPath; }
void setHostClass(Class<?> cls) {
@@ -129,10 +137,32 @@ public abstract class WebApp extends Ser
@Override
public void configureServlets() {
setup();
+
serve("/", "/__stop").with(Dispatcher.class);
+
for (String path : this.servePathSpecs) {
serve(path).with(Dispatcher.class);
}
+
+ // Add in the web services filters/serves if app has them.
+ // Using Jersey/guice integration module. If user has web services
+ // they must have also bound a default one in their webapp code.
+ if (this.wsName != null) {
+ // There seems to be an issue with the guice/jersey integration
+ // where we have to list the stuff we don't want it to serve
+ // through the guicecontainer. In this case its everything except
+ // the the web services api prefix. We can't just change the filter
+ // from /* below - that doesn't work.
+ String regex = "(?!/" + this.wsName + ")";
+ serveRegex(regex).with(DefaultWrapperServlet.class);
+
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(ResourceConfig.FEATURE_IMPLICIT_VIEWABLES, "true");
+ params.put(ServletContainer.FEATURE_FILTER_FORWARD_ON_404, "true");
+ params.put(FeaturesAndProperties.FEATURE_XMLROOTELEMENT_PROCESSING, "true");
+ filter("/*").through(GuiceContainer.class, params);
+ }
+
}
/**
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java Wed Dec 14 23:34:04 2011
@@ -72,6 +72,7 @@ public class WebApps {
}
final String name;
+ final String wsName;
final Class<T> api;
final T application;
String bindAddress = "0.0.0.0";
@@ -82,10 +83,15 @@ public class WebApps {
private final HashSet<ServletStruct> servlets = new HashSet<ServletStruct>();
private final HashMap<String, Object> attributes = new HashMap<String, Object>();
- Builder(String name, Class<T> api, T application) {
+ Builder(String name, Class<T> api, T application, String wsName) {
this.name = name;
this.api = api;
this.application = application;
+ this.wsName = wsName;
+ }
+
+ Builder(String name, Class<T> api, T application) {
+ this(name, api, application, null);
}
public Builder<T> at(String bindAddress) {
@@ -142,6 +148,7 @@ public class WebApps {
};
}
webapp.setName(name);
+ webapp.setWebServices(wsName);
String basePath = "/" + name;
webapp.setRedirectPath(basePath);
if (basePath.equals("/")) {
@@ -150,6 +157,14 @@ public class WebApps {
webapp.addServePathSpec(basePath);
webapp.addServePathSpec(basePath + "/*");
}
+ if (wsName != null && !wsName.equals(basePath)) {
+ if (wsName.equals("/")) {
+ webapp.addServePathSpec("/*");
+ } else {
+ webapp.addServePathSpec("/" + wsName);
+ webapp.addServePathSpec("/" + wsName + "/*");
+ }
+ }
if (conf == null) {
conf = new Configuration();
}
@@ -238,6 +253,20 @@ public class WebApps {
* @param prefix of the webapp
* @param api the api class for the application
* @param app the application instance
+ * @param wsPrefix the prefix for the webservice api for this app
+ * @return a webapp builder
+ */
+ public static <T> Builder<T> $for(String prefix, Class<T> api, T app, String wsPrefix) {
+ return new Builder<T>(prefix, api, app, wsPrefix);
+ }
+
+ /**
+ * Create a new webapp builder.
+ * @see WebApps for a complete example
+ * @param <T> application (holding the embedded webapp) type
+ * @param prefix of the webapp
+ * @param api the api class for the application
+ * @param app the application instance
* @return a webapp builder
*/
public static <T> Builder<T> $for(String prefix, Class<T> api, T app) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/log/AggregatedLogsBlock.java Wed Dec 14 23:34:04 2011
@@ -53,8 +53,8 @@ public class AggregatedLogsBlock extends
logEntity = containerId.toString();
}
- if (!conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
- YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
+ if (!conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
html.h1()
._("Aggregation is not enabled. Try the nodemanager at " + nodeId)
._();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/test/java/org/apache/hadoop/yarn/TestRPC.java Wed Dec 14 23:34:04 2011
@@ -58,16 +58,6 @@ public class TestRPC {
private static final String EXCEPTION_CAUSE = "exception cause";
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-// @Test
-// public void testAvroRPC() throws Exception {
-// test(AvroYarnRPC.class.getName());
-// }
-//
-// @Test
-// public void testHadoopNativeRPC() throws Exception {
-// test(HadoopYarnRPC.class.getName());
-// }
-
@Test
public void testUnknownCall() {
Configuration conf = new Configuration();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/NodeStatus.java Wed Dec 14 23:34:04 2011
@@ -19,6 +19,7 @@ package org.apache.hadoop.yarn.server.ap
import java.util.List;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
@@ -33,6 +34,9 @@ public interface NodeStatus {
public abstract void setContainersStatuses(
List<ContainerStatus> containersStatuses);
+ public abstract List<ApplicationId> getKeepAliveApplications();
+ public abstract void setKeepAliveApplications(List<ApplicationId> appIds);
+
NodeHealthStatus getNodeHealthStatus();
void setNodeHealthStatus(NodeHealthStatus healthStatus);
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/java/org/apache/hadoop/yarn/server/api/records/impl/pb/NodeStatusPBImpl.java Wed Dec 14 23:34:04 2011
@@ -23,13 +23,16 @@ import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.ProtoBase;
+import org.apache.hadoop.yarn.api.records.impl.pb.ApplicationIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.ContainerStatusPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeHealthStatusPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
+import org.apache.hadoop.yarn.proto.YarnProtos.ApplicationIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeHealthStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
@@ -37,7 +40,9 @@ import org.apache.hadoop.yarn.proto.Yarn
import org.apache.hadoop.yarn.proto.YarnServerCommonProtos.NodeStatusProtoOrBuilder;
import org.apache.hadoop.yarn.server.api.records.NodeStatus;
-public class NodeStatusPBImpl extends ProtoBase<NodeStatusProto> implements NodeStatus {
+
+public class NodeStatusPBImpl extends ProtoBase<NodeStatusProto> implements
+ NodeStatus {
NodeStatusProto proto = NodeStatusProto.getDefaultInstance();
NodeStatusProto.Builder builder = null;
boolean viaProto = false;
@@ -45,6 +50,7 @@ public class NodeStatusPBImpl extends Pr
private NodeId nodeId = null;
private List<ContainerStatus> containers = null;
private NodeHealthStatus nodeHealthStatus = null;
+ private List<ApplicationId> keepAliveApplications = null;
public NodeStatusPBImpl() {
builder = NodeStatusProto.newBuilder();
@@ -55,15 +61,14 @@ public class NodeStatusPBImpl extends Pr
viaProto = true;
}
- public NodeStatusProto getProto() {
-
- mergeLocalToProto();
+ public synchronized NodeStatusProto getProto() {
+ mergeLocalToProto();
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}
- private void mergeLocalToBuilder() {
+ private synchronized void mergeLocalToBuilder() {
if (this.nodeId != null) {
builder.setNodeId(convertToProtoFormat(this.nodeId));
}
@@ -73,9 +78,12 @@ public class NodeStatusPBImpl extends Pr
if (this.nodeHealthStatus != null) {
builder.setNodeHealthStatus(convertToProtoFormat(this.nodeHealthStatus));
}
+ if (this.keepAliveApplications != null) {
+ addKeepAliveApplicationsToProto();
+ }
}
- private void mergeLocalToProto() {
+ private synchronized void mergeLocalToProto() {
if (viaProto)
maybeInitBuilder();
mergeLocalToBuilder();
@@ -84,14 +92,14 @@ public class NodeStatusPBImpl extends Pr
viaProto = true;
}
- private void maybeInitBuilder() {
+ private synchronized void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = NodeStatusProto.newBuilder(proto);
}
viaProto = false;
}
- private void addContainersToProto() {
+ private synchronized void addContainersToProto() {
maybeInitBuilder();
builder.clearContainersStatuses();
if (containers == null)
@@ -124,19 +132,53 @@ public class NodeStatusPBImpl extends Pr
};
builder.addAllContainersStatuses(iterable);
}
+
+ private synchronized void addKeepAliveApplicationsToProto() {
+ maybeInitBuilder();
+ builder.clearKeepAliveApplications();
+ if (keepAliveApplications == null)
+ return;
+ Iterable<ApplicationIdProto> iterable = new Iterable<ApplicationIdProto>() {
+ @Override
+ public Iterator<ApplicationIdProto> iterator() {
+ return new Iterator<ApplicationIdProto>() {
+
+ Iterator<ApplicationId> iter = keepAliveApplications.iterator();
+
+ @Override
+ public boolean hasNext() {
+ return iter.hasNext();
+ }
+
+ @Override
+ public ApplicationIdProto next() {
+ return convertToProtoFormat(iter.next());
+ }
+
+ @Override
+ public void remove() {
+ throw new UnsupportedOperationException();
+
+ }
+ };
+
+ }
+ };
+ builder.addAllKeepAliveApplications(iterable);
+ }
@Override
- public int getResponseId() {
+ public synchronized int getResponseId() {
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
return p.getResponseId();
}
@Override
- public void setResponseId(int responseId) {
+ public synchronized void setResponseId(int responseId) {
maybeInitBuilder();
builder.setResponseId(responseId);
}
@Override
- public NodeId getNodeId() {
+ public synchronized NodeId getNodeId() {
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
if (this.nodeId != null) {
return this.nodeId;
@@ -148,8 +190,9 @@ public class NodeStatusPBImpl extends Pr
return this.nodeId;
}
+
@Override
- public void setNodeId(NodeId nodeId) {
+ public synchronized void setNodeId(NodeId nodeId) {
maybeInitBuilder();
if (nodeId == null)
builder.clearNodeId();
@@ -158,20 +201,35 @@ public class NodeStatusPBImpl extends Pr
}
@Override
- public List<ContainerStatus> getContainersStatuses() {
+ public synchronized List<ContainerStatus> getContainersStatuses() {
initContainers();
return this.containers;
}
@Override
- public void setContainersStatuses(List<ContainerStatus> containers) {
+ public synchronized void setContainersStatuses(
+ List<ContainerStatus> containers) {
if (containers == null) {
builder.clearContainersStatuses();
}
this.containers = containers;
}
+
+ @Override
+ public synchronized List<ApplicationId> getKeepAliveApplications() {
+ initKeepAliveApplications();
+ return this.keepAliveApplications;
+ }
+
+ @Override
+ public synchronized void setKeepAliveApplications(List<ApplicationId> appIds) {
+ if (appIds == null) {
+ builder.clearKeepAliveApplications();
+ }
+ this.keepAliveApplications = appIds;
+ }
- private void initContainers() {
+ private synchronized void initContainers() {
if (this.containers != null) {
return;
}
@@ -185,8 +243,22 @@ public class NodeStatusPBImpl extends Pr
}
+ private synchronized void initKeepAliveApplications() {
+ if (this.keepAliveApplications != null) {
+ return;
+ }
+ NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
+ List<ApplicationIdProto> list = p.getKeepAliveApplicationsList();
+ this.keepAliveApplications = new ArrayList<ApplicationId>();
+
+ for (ApplicationIdProto c : list) {
+ this.keepAliveApplications.add(convertFromProtoFormat(c));
+ }
+
+ }
+
@Override
- public NodeHealthStatus getNodeHealthStatus() {
+ public synchronized NodeHealthStatus getNodeHealthStatus() {
NodeStatusProtoOrBuilder p = viaProto ? proto : builder;
if (nodeHealthStatus != null) {
return nodeHealthStatus;
@@ -199,7 +271,7 @@ public class NodeStatusPBImpl extends Pr
}
@Override
- public void setNodeHealthStatus(NodeHealthStatus healthStatus) {
+ public synchronized void setNodeHealthStatus(NodeHealthStatus healthStatus) {
maybeInitBuilder();
if (healthStatus == null) {
builder.clearNodeHealthStatus();
@@ -231,4 +303,12 @@ public class NodeStatusPBImpl extends Pr
private ContainerStatusProto convertToProtoFormat(ContainerStatus c) {
return ((ContainerStatusPBImpl)c).getProto();
}
-}
+
+ private ApplicationIdPBImpl convertFromProtoFormat(ApplicationIdProto c) {
+ return new ApplicationIdPBImpl(c);
+ }
+
+ private ApplicationIdProto convertToProtoFormat(ApplicationId c) {
+ return ((ApplicationIdPBImpl)c).getProto();
+ }
+}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/proto/yarn_server_common_protos.proto Wed Dec 14 23:34:04 2011
@@ -34,6 +34,7 @@ message NodeStatusProto {
optional int32 response_id = 2;
repeated ContainerStatusProto containersStatuses = 3;
optional NodeHealthStatusProto nodeHealthStatus = 4;
+ repeated ApplicationIdProto keep_alive_applications = 5;
}
message RegistrationResponseProto {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-common/src/main/resources/yarn-default.xml Wed Dec 14 23:34:04 2011
@@ -72,7 +72,7 @@
<property>
<description>The expiry interval for application master reporting.</description>
- <name>yarn.resourcemanager.am.liveness-monitor.expiry-interval-ms</name>
+ <name>yarn.am.liveness-monitor.expiry-interval-ms</name>
<value>600000</value>
</property>
@@ -155,7 +155,7 @@
<property>
<description>How long to wait until a node manager is considered dead.</description>
- <name>yarn.resourcemanager.nm.liveness-monitor.expiry-interval-ms</name>
+ <name>yarn.nm.liveness-monitor.expiry-interval-ms</name>
<value>600000</value>
</property>
@@ -210,6 +210,12 @@
<value>10000</value>
</property>
+ <property>
+ <description>Interval at which the delayed token removal thread runs</description>
+ <name>yarn.resourcemanager.delayed.delegation-token.removal-interval-ms</name>
+ <value>30000</value>
+ </property>
+
<!-- Node Manager Configs -->
<property>
<description>address of node manager IPC.</description>
@@ -304,7 +310,7 @@
<property>
<description>Whether to enable log aggregation</description>
- <name>yarn.nodemanager.log-aggregation-enable</name>
+ <name>yarn.log-aggregation-enable</name>
<value>false</value>
</property>
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/NodeStatusUpdaterImpl.java Wed Dec 14 23:34:04 2011
@@ -20,8 +20,12 @@ package org.apache.hadoop.yarn.server.no
import java.net.InetSocketAddress;
import java.util.ArrayList;
+import java.util.Collections;
+import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
+import java.util.Map;
+import java.util.Random;
import java.util.Map.Entry;
import org.apache.avro.AvroRuntimeException;
@@ -56,6 +60,7 @@ import org.apache.hadoop.yarn.server.nod
import org.apache.hadoop.yarn.server.security.ContainerTokenSecretManager;
import org.apache.hadoop.yarn.service.AbstractService;
+
public class NodeStatusUpdaterImpl extends AbstractService implements
NodeStatusUpdater {
@@ -76,6 +81,12 @@ public class NodeStatusUpdaterImpl exten
private byte[] secretKeyBytes = new byte[0];
private boolean isStopped;
private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
+ private boolean tokenKeepAliveEnabled;
+ private long tokenRemovalDelayMs;
+ /** Keeps track of when the next keep alive request should be sent for an app */
+ private Map<ApplicationId, Long> appTokenKeepAliveMap =
+ new HashMap<ApplicationId, Long>();
+ private Random keepAliveDelayRandom = new Random();
private final NodeHealthCheckerService healthChecker;
private final NodeManagerMetrics metrics;
@@ -103,6 +114,13 @@ public class NodeStatusUpdaterImpl exten
this.totalResource = recordFactory.newRecordInstance(Resource.class);
this.totalResource.setMemory(memoryMb);
metrics.addResource(totalResource);
+ this.tokenKeepAliveEnabled =
+ conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)
+ && isSecurityEnabled();
+ this.tokenRemovalDelayMs =
+ conf.getInt(YarnConfiguration.RM_NM_EXPIRY_INTERVAL_MS,
+ YarnConfiguration.DEFAULT_RM_NM_EXPIRY_INTERVAL_MS);
super.init(conf);
}
@@ -139,6 +157,10 @@ public class NodeStatusUpdaterImpl exten
super.stop();
}
+ protected boolean isSecurityEnabled() {
+ return UserGroupInformation.isSecurityEnabled();
+ }
+
protected ResourceTracker getRMClient() {
Configuration conf = getConfig();
YarnRPC rpc = YarnRPC.create(conf);
@@ -188,6 +210,29 @@ public class NodeStatusUpdaterImpl exten
return this.secretKeyBytes.clone();
}
+ private List<ApplicationId> createKeepAliveApplicationList() {
+ if (!tokenKeepAliveEnabled) {
+ return Collections.emptyList();
+ }
+
+ List<ApplicationId> appList = new ArrayList<ApplicationId>();
+ for (Iterator<Entry<ApplicationId, Long>> i =
+ this.appTokenKeepAliveMap.entrySet().iterator(); i.hasNext();) {
+ Entry<ApplicationId, Long> e = i.next();
+ ApplicationId appId = e.getKey();
+ Long nextKeepAlive = e.getValue();
+ if (!this.context.getApplications().containsKey(appId)) {
+ // Remove if the application has finished.
+ i.remove();
+ } else if (System.currentTimeMillis() > nextKeepAlive) {
+ // KeepAlive list for the next heartbeat.
+ appList.add(appId);
+ trackAppForKeepAlive(appId);
+ }
+ }
+ return appList;
+ }
+
private NodeStatus getNodeStatus() {
NodeStatus nodeStatus = recordFactory.newRecordInstance(NodeStatus.class);
@@ -231,9 +276,29 @@ public class NodeStatusUpdaterImpl exten
}
nodeStatus.setNodeHealthStatus(nodeHealthStatus);
+ List<ApplicationId> keepAliveAppIds = createKeepAliveApplicationList();
+ nodeStatus.setKeepAliveApplications(keepAliveAppIds);
+
return nodeStatus;
}
+ private void trackAppsForKeepAlive(List<ApplicationId> appIds) {
+ if (tokenKeepAliveEnabled && appIds != null && appIds.size() > 0) {
+ for (ApplicationId appId : appIds) {
+ trackAppForKeepAlive(appId);
+ }
+ }
+ }
+
+ private void trackAppForKeepAlive(ApplicationId appId) {
+ // Next keepAlive request for app between 0.7 & 0.9 of when the token will
+ // likely expire.
+ long nextTime = System.currentTimeMillis()
+ + (long) (0.7 * tokenRemovalDelayMs + (0.2 * tokenRemovalDelayMs
+ * keepAliveDelayRandom.nextInt(100))/100);
+ appTokenKeepAliveMap.put(appId, nextTime);
+ }
+
@Override
public void sendOutofBandHeartBeat() {
synchronized (this.heartbeatMonitor) {
@@ -245,6 +310,7 @@ public class NodeStatusUpdaterImpl exten
new Thread("Node Status Updater") {
@Override
+ @SuppressWarnings("unchecked")
public void run() {
int lastHeartBeatID = 0;
while (!isStopped) {
@@ -284,6 +350,8 @@ public class NodeStatusUpdaterImpl exten
}
List<ApplicationId> appsToCleanup =
response.getApplicationsToCleanupList();
+ //Only start tracking for keepAlive on FINISH_APP
+ trackAppsForKeepAlive(appsToCleanup);
if (appsToCleanup.size() != 0) {
dispatcher.getEventHandler().handle(
new CMgrCompletedAppsEvent(appsToCleanup));
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/ContainerManagerImpl.java Wed Dec 14 23:34:04 2011
@@ -192,8 +192,8 @@ public class ContainerManagerImpl extend
protected LogHandler createLogHandler(Configuration conf, Context context,
DeletionService deletionService) {
- if (conf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
- YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
+ if (conf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
return new LogAggregationService(this.dispatcher, context,
deletionService, dirsHandler);
} else {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/logaggregation/AppLogAggregatorImpl.java Wed Dec 14 23:34:04 2011
@@ -170,6 +170,7 @@ public class AppLogAggregatorImpl implem
this.writer.closeWriter();
LOG.info("Finished aggregate log-file for app " + this.applicationId);
}
+
try {
userUgi.doAs(new PrivilegedExceptionAction<Object>() {
@Override
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java Wed Dec 14 23:34:04 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -88,13 +88,11 @@ public class AllApplicationsPage extends
.tbody();
for (Entry<ApplicationId, Application> entry : this.nmContext
.getApplications().entrySet()) {
- ApplicationId appId = entry.getKey();
- Application app = entry.getValue();
- String appIdStr = ConverterUtils.toString(appId);
+ AppInfo info = new AppInfo(entry.getValue());
tableBody
.tr()
- .td().a(url("application", appIdStr), appIdStr)._()
- .td()._(app.getApplicationState())
+ .td().a(url("application", info.getId()), info.getId())._()
+ .td()._(info.getState())
._()
._();
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java Wed Dec 14 23:34:04 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -83,17 +83,14 @@ public class AllContainersPage extends N
._().tbody();
for (Entry<ContainerId, Container> entry : this.nmContext
.getContainers().entrySet()) {
- ContainerId containerId = entry.getKey();
- Container container = entry.getValue();
- String containerIdStr = ConverterUtils.toString(containerId);
+ ContainerInfo info = new ContainerInfo(this.nmContext, entry.getValue());
tableBody
.tr()
- .td().a(url("container", containerIdStr), containerIdStr)
+ .td().a(url("container", info.getId()), info.getId())
._()
- .td()._(container.getContainerState())._()
+ .td()._(info.getState())._()
.td()
- .a(url("containerlogs", containerIdStr, container.getUser()),
- "logs")._()
+ .a(url(info.getShortLogLink()), "logs")._()
._();
}
tableBody._()._()._();
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java Wed Dec 14 23:34:04 2011
@@ -23,19 +23,16 @@ import static org.apache.hadoop.yarn.web
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-import java.util.Map;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -81,15 +78,14 @@ public class ApplicationPage extends NMV
ConverterUtils.toApplicationId(this.recordFactory,
$(APPLICATION_ID));
Application app = this.nmContext.getApplications().get(applicationID);
- Map<ContainerId, Container> containers = app.getContainers();
+ AppInfo info = new AppInfo(app);
info("Application's information")
- ._("ApplicationId", ConverterUtils.toString(app.getAppId()))
- ._("ApplicationState", app.getApplicationState().toString())
- ._("User", app.getUser());
+ ._("ApplicationId", info.getId())
+ ._("ApplicationState", info.getState())
+ ._("User", info.getUser());
TABLE<Hamlet> containersListBody = html._(InfoBlock.class)
.table("#containers");
- for (ContainerId containerId : containers.keySet()) {
- String containerIdStr = ConverterUtils.toString(containerId);
+ for (String containerIdStr : info.getContainers()) {
containersListBody
.tr().td()
.a(url("container", containerIdStr), containerIdStr)
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java Wed Dec 14 23:34:04 2011
@@ -18,18 +18,16 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -77,21 +75,16 @@ public class ContainerPage extends NMVie
+ "please go back to the previous page and retry.")._();
return;
}
- ContainerStatus containerData = container.cloneAndGetContainerStatus();
- int exitCode = containerData.getExitStatus();
- String exiStatus =
- (exitCode == YarnConfiguration.INVALID_CONTAINER_EXIT_STATUS) ?
- "N/A" : String.valueOf(exitCode);
+ ContainerInfo info = new ContainerInfo(this.nmContext, container);
+
info("Container information")
- ._("ContainerID", $(CONTAINER_ID))
- ._("ContainerState", container.getContainerState())
- ._("ExitStatus", exiStatus)
- ._("Diagnostics", containerData.getDiagnostics())
- ._("User", container.getUser())
- ._("TotalMemoryNeeded",
- container.getLaunchContext().getResource().getMemory())
- ._("logs", ujoin("containerlogs", $(CONTAINER_ID), container.getUser()),
- "Link to logs");
+ ._("ContainerID", info.getId())
+ ._("ContainerState", info.getState())
+ ._("ExitStatus", info.getExitStatus())
+ ._("Diagnostics", info.getDiagnostics())
+ ._("User", info.getUser())
+ ._("TotalMemoryNeeded", info.getMemoryNeeded())
+ ._("logs", info.getShortLogLink(), "Link to logs");
html._(InfoBlock.class);
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMController.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMController.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMController.java Wed Dec 14 23:34:04 2011
@@ -88,8 +88,8 @@ public class NMController extends Contro
containerId.getApplicationAttemptId().getApplicationId();
Application app = nmContext.getApplications().get(appId);
if (app == null
- && nmConf.getBoolean(YarnConfiguration.NM_LOG_AGGREGATION_ENABLED,
- YarnConfiguration.DEFAULT_NM_LOG_AGGREGATION_ENABLED)) {
+ && nmConf.getBoolean(YarnConfiguration.LOG_AGGREGATION_ENABLED,
+ YarnConfiguration.DEFAULT_LOG_AGGREGATION_ENABLED)) {
String logServerUrl = nmConf.get(YarnConfiguration.YARN_LOG_SERVER_URL);
String redirectUrl = null;
if (logServerUrl == null || logServerUrl.isEmpty()) {
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java Wed Dec 14 23:34:04 2011
@@ -23,10 +23,10 @@ import static org.apache.hadoop.yarn.web
import java.util.Date;
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.util.YarnVersionInfo;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NodeInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -36,6 +36,8 @@ import com.google.inject.Inject;
public class NodePage extends NMView {
+ private static final long BYTES_IN_MB = 1024 * 1024;
+
@Override
protected void commonPreHead(HTML<_> html) {
super.commonPreHead(html);
@@ -60,21 +62,22 @@ public class NodePage extends NMView {
@Override
protected void render(Block html) {
+ NodeInfo info = new NodeInfo(this.context, this.resourceView);
info("NodeManager information")
._("Total Vmem allocated for Containers",
- this.resourceView.getVmemAllocatedForContainers() + "bytes")
+ StringUtils.byteDesc(info.getTotalVmemAllocated() * BYTES_IN_MB))
._("Total Pmem allocated for Container",
- this.resourceView.getPmemAllocatedForContainers() + "bytes")
+ StringUtils.byteDesc(info.getTotalPmemAllocated() * BYTES_IN_MB))
._("NodeHealthyStatus",
- this.context.getNodeHealthStatus().getIsNodeHealthy())
+ info.getHealthStatus())
._("LastNodeHealthTime", new Date(
- this.context.getNodeHealthStatus().getLastHealthReportTime()))
+ info.getLastNodeUpdateTime()))
._("NodeHealthReport",
- this.context.getNodeHealthStatus().getHealthReport())
- ._("Node Manager Version:", YarnVersionInfo.getBuildVersion() +
- " on " + YarnVersionInfo.getDate())
- ._("Hadoop Version:", VersionInfo.getBuildVersion() +
- " on " + VersionInfo.getDate());
+ info.getHealthReport())
+ ._("Node Manager Version:", info.getNMBuildVersion() +
+ " on " + info.getNMVersionBuiltOn())
+ ._("Hadoop Version:", info.getHadoopBuildVersion() +
+ " on " + info.getHadoopVersionBuiltOn());
html._(InfoBlock.class);
}
}
Modified: hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java?rev=1214546&r1=1214545&r2=1214546&view=diff
==============================================================================
--- hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java (original)
+++ hadoop/common/branches/HDFS-1623/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java Wed Dec 14 23:34:04 2011
@@ -30,9 +30,10 @@ import org.apache.hadoop.yarn.server.nod
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.service.AbstractService;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
public class WebServer extends AbstractService {
@@ -61,8 +62,9 @@ public class WebServer extends AbstractS
YarnConfiguration.DEFAULT_NM_WEBAPP_ADDRESS);
LOG.info("Instantiating NMWebApp at " + bindAddress);
try {
- this.webApp = WebApps.$for("node", Context.class, this.nmContext).at(
- bindAddress).with(getConfig()).start(this.nmWebApp);
+ this.webApp =
+ WebApps.$for("node", Context.class, this.nmContext, "ws")
+ .at(bindAddress).with(getConfig()).start(this.nmWebApp);
} catch (Exception e) {
String msg = "NMWebapps failed to start.";
LOG.error(msg, e);
@@ -95,6 +97,9 @@ public class WebServer extends AbstractS
@Override
public void setup() {
+ bind(NMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(JAXBContextResolver.class);
bind(ResourceView.class).toInstance(this.resourceView);
bind(ApplicationACLsManager.class).toInstance(this.aclsManager);
bind(LocalDirsHandlerService.class).toInstance(dirsHandler);