You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/12/14 00:12:36 UTC
svn commit: r1213978 [2/3] - in
/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project: ./
hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/
hadoop-mapreduce-clie...
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Tue Dec 13 23:12:33 2011
@@ -20,12 +20,13 @@ package org.apache.hadoop.mapreduce.v2.h
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.util.Times;
@@ -65,7 +66,7 @@ public class HsTasksBlock extends HtmlBl
if (!symbol.isEmpty()) {
type = MRApps.taskType(symbol);
}
-
+
THEAD<TABLE<Hamlet>> thead = html.table("#tasks").thead();
//Create the spanning row
int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
@@ -74,7 +75,7 @@ public class HsTasksBlock extends HtmlBl
th().$colspan(attemptColSpan).$class("ui-state-default").
_("Successful Attempt")._().
_();
-
+
TR<THEAD<TABLE<Hamlet>>> theadRow = thead.
tr().
th("Name").
@@ -83,33 +84,33 @@ public class HsTasksBlock extends HtmlBl
th("Finish Time").
th("Elapsed Time").
th("Start Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Shuffle Finish Time"); //Attempt
theadRow.th("Merge Finish Time"); //Attempt
}
-
+
theadRow.th("Finish Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Elapsed Time Shuffle"); //Attempt
theadRow.th("Elapsed Time Merge"); //Attempt
theadRow.th("Elapsed Time Reduce"); //Attempt
}
theadRow.th("Elapsed Time"); //Attempt
-
+
TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
}
- String tid = MRApps.toString(task.getID());
-
- TaskReport report = task.getReport();
- long startTime = report.getStartTime();
- long finishTime = report.getFinishTime();
- long elapsed = Times.elapsed(startTime, finishTime, false);
-
+ TaskInfo info = new TaskInfo(task);
+ String tid = info.getId();
+
+ long startTime = info.getStartTime();
+ long finishTime = info.getFinishTime();
+ long elapsed = info.getElapsedTime();
+
long attemptStartTime = -1;
long shuffleFinishTime = -1;
long sortFinishTime = -1;
@@ -118,30 +119,31 @@ public class HsTasksBlock extends HtmlBl
long elapsedSortTime = -1;
long elapsedReduceTime = -1;
long attemptElapsed = -1;
- TaskAttempt successful = getSuccessfulAttempt(task);
+ TaskAttempt successful = info.getSuccessful();
if(successful != null) {
- attemptStartTime = successful.getLaunchTime();
- attemptFinishTime = successful.getFinishTime();
+ TaskAttemptInfo ta;
if(type == TaskType.REDUCE) {
- shuffleFinishTime = successful.getShuffleFinishTime();
- sortFinishTime = successful.getSortFinishTime();
- elapsedShuffleTime =
- Times.elapsed(attemptStartTime, shuffleFinishTime, false);
- elapsedSortTime =
- Times.elapsed(shuffleFinishTime, sortFinishTime, false);
- elapsedReduceTime =
- Times.elapsed(sortFinishTime, attemptFinishTime, false);
+ ReduceTaskAttemptInfo rta = new ReduceTaskAttemptInfo(successful, type);
+ shuffleFinishTime = rta.getShuffleFinishTime();
+ sortFinishTime = rta.getMergeFinishTime();
+ elapsedShuffleTime = rta.getElapsedShuffleTime();
+ elapsedSortTime = rta.getElapsedMergeTime();
+ elapsedReduceTime = rta.getElapsedReduceTime();
+ ta = rta;
+ } else {
+ ta = new TaskAttemptInfo(successful, type, false);
}
- attemptElapsed =
- Times.elapsed(attemptStartTime, attemptFinishTime, false);
+ attemptStartTime = ta.getStartTime();
+ attemptFinishTime = ta.getFinishTime();
+ attemptElapsed = ta.getElapsedTime();
}
-
+
TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
row.
td().
- br().$title(String.valueOf(task.getID().getId()))._(). // sorting
+ br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
a(url("task", tid), tid)._().
- td(report.getTaskState().toString()).
+ td(info.getState()).
td().
br().$title(String.valueOf(startTime))._().
_(Times.format(startTime))._().
@@ -166,7 +168,7 @@ public class HsTasksBlock extends HtmlBl
td().
br().$title(String.valueOf(attemptFinishTime))._().
_(Times.format(attemptFinishTime))._();
-
+
if(type == TaskType.REDUCE) {
row.td().
br().$title(String.valueOf(elapsedShuffleTime))._().
@@ -178,7 +180,7 @@ public class HsTasksBlock extends HtmlBl
br().$title(String.valueOf(elapsedReduceTime))._().
_(formatTime(elapsedReduceTime))._();
}
-
+
row.td().
br().$title(String.valueOf(attemptElapsed))._().
_(formatTime(attemptElapsed))._();
@@ -194,7 +196,7 @@ public class HsTasksBlock extends HtmlBl
.$type(InputType.text).$name("elapsed_time").$value("Elapsed Time")._()
._().th().input("search_init").$type(InputType.text)
.$name("attempt_start_time").$value("Start Time")._()._();
-
+
if(type == TaskType.REDUCE) {
footRow.th().input("search_init").$type(InputType.text)
.$name("shuffle_time").$value("Shuffle Time")._()._();
@@ -216,20 +218,12 @@ public class HsTasksBlock extends HtmlBl
footRow.th().input("search_init").$type(InputType.text)
.$name("attempt_elapsed").$value("Elapsed Time")._()._();
-
+
footRow._()._()._();
}
private String formatTime(long elapsed) {
return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
}
-
- private TaskAttempt getSuccessfulAttempt(Task task) {
- for(TaskAttempt attempt: task.getAttempts().values()) {
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- return attempt;
- }
- }
- return null;
- }
+
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java Tue Dec 13 23:12:33 2011
@@ -27,6 +27,7 @@ import static org.apache.hadoop.yarn.web
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
public class HsWebApp extends WebApp implements AMParams {
@@ -39,6 +40,9 @@ public class HsWebApp extends WebApp imp
@Override
public void setup() {
+ bind(HsWebServices.class);
+ bind(JAXBContextResolver.class);
+ bind(GenericExceptionHandler.class);
bind(AppContext.class).toInstance(history);
route("/", HsController.class);
route("/app", HsController.class);
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Tue Dec 13 23:12:33 2011
@@ -238,7 +238,7 @@
<dependency>
<groupId>com.google.inject.extensions</groupId>
<artifactId>guice-servlet</artifactId>
- <version>2.0</version>
+ <version>3.0</version>
</dependency>
<dependency>
<groupId>junit</groupId>
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java Tue Dec 13 23:12:33 2011
@@ -18,24 +18,29 @@
package org.apache.hadoop.yarn.webapp;
-import com.google.common.base.CharMatcher;
-import static com.google.common.base.Preconditions.*;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.inject.Provides;
-import com.google.inject.servlet.GuiceFilter;
-import com.google.inject.servlet.ServletModule;
+import static com.google.common.base.Preconditions.checkNotNull;
import java.util.ArrayList;
+import java.util.HashMap;
import java.util.List;
+import java.util.Map;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.http.HttpServer;
-import org.apache.hadoop.yarn.util.StringHelper;
-
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import com.google.common.base.CharMatcher;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+import com.google.inject.Provides;
+import com.google.inject.servlet.GuiceFilter;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.core.ResourceConfig;
+import com.sun.jersey.core.util.FeaturesAndProperties;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
/**
* @see WebApps for a usage example
*/
@@ -45,9 +50,10 @@ public abstract class WebApp extends Ser
public enum HTTP { GET, POST, HEAD, PUT, DELETE };
private volatile String name;
- private volatile List<String> servePathSpecs = new ArrayList<String>();
+ private volatile List<String> servePathSpecs = new ArrayList<String>();
// path to redirect to if user goes to "/"
private volatile String redirectPath;
+ private volatile String wsName;
private volatile Configuration conf;
private volatile HttpServer httpServer;
private volatile GuiceFilter guiceFilter;
@@ -104,18 +110,20 @@ public abstract class WebApp extends Ser
void addServePathSpec(String path) { this.servePathSpecs.add(path); }
- public String[] getServePathSpecs() {
+ public String[] getServePathSpecs() {
return this.servePathSpecs.toArray(new String[this.servePathSpecs.size()]);
}
/**
- * Set a path to redirect the user to if they just go to "/". For
- * instance "/" goes to "/yarn/apps". This allows the filters to
+ * Set a path to redirect the user to if they just go to "/". For
+ * instance "/" goes to "/yarn/apps". This allows the filters to
* more easily differentiate the different webapps.
* @param path the path to redirect to
*/
void setRedirectPath(String path) { this.redirectPath = path; }
+ void setWebServices (String name) { this.wsName = name; }
+
public String getRedirectPath() { return this.redirectPath; }
void setHostClass(Class<?> cls) {
@@ -129,10 +137,32 @@ public abstract class WebApp extends Ser
@Override
public void configureServlets() {
setup();
+
serve("/", "/__stop").with(Dispatcher.class);
+
for (String path : this.servePathSpecs) {
serve(path).with(Dispatcher.class);
}
+
+ // Add in the web services filters/serves if app has them.
+ // Using Jersey/guice integration module. If user has web services
+ // they must have also bound a default one in their webapp code.
+ if (this.wsName != null) {
+ // There seems to be an issue with the guice/jersey integration
+ // where we have to list the stuff we don't want it to serve
+ // through the guicecontainer. In this case its everything except
+ // the the web services api prefix. We can't just change the filter
+ // from /* below - that doesn't work.
+ String regex = "(?!/" + this.wsName + ")";
+ serveRegex(regex).with(DefaultWrapperServlet.class);
+
+ Map<String, String> params = new HashMap<String, String>();
+ params.put(ResourceConfig.FEATURE_IMPLICIT_VIEWABLES, "true");
+ params.put(ServletContainer.FEATURE_FILTER_FORWARD_ON_404, "true");
+ params.put(FeaturesAndProperties.FEATURE_XMLROOTELEMENT_PROCESSING, "true");
+ filter("/*").through(GuiceContainer.class, params);
+ }
+
}
/**
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java Tue Dec 13 23:12:33 2011
@@ -72,6 +72,7 @@ public class WebApps {
}
final String name;
+ final String wsName;
final Class<T> api;
final T application;
String bindAddress = "0.0.0.0";
@@ -82,10 +83,15 @@ public class WebApps {
private final HashSet<ServletStruct> servlets = new HashSet<ServletStruct>();
private final HashMap<String, Object> attributes = new HashMap<String, Object>();
- Builder(String name, Class<T> api, T application) {
+ Builder(String name, Class<T> api, T application, String wsName) {
this.name = name;
this.api = api;
this.application = application;
+ this.wsName = wsName;
+ }
+
+ Builder(String name, Class<T> api, T application) {
+ this(name, api, application, null);
}
public Builder<T> at(String bindAddress) {
@@ -142,6 +148,7 @@ public class WebApps {
};
}
webapp.setName(name);
+ webapp.setWebServices(wsName);
String basePath = "/" + name;
webapp.setRedirectPath(basePath);
if (basePath.equals("/")) {
@@ -150,6 +157,14 @@ public class WebApps {
webapp.addServePathSpec(basePath);
webapp.addServePathSpec(basePath + "/*");
}
+ if (wsName != null && !wsName.equals(basePath)) {
+ if (wsName.equals("/")) {
+ webapp.addServePathSpec("/*");
+ } else {
+ webapp.addServePathSpec("/" + wsName);
+ webapp.addServePathSpec("/" + wsName + "/*");
+ }
+ }
if (conf == null) {
conf = new Configuration();
}
@@ -238,6 +253,20 @@ public class WebApps {
* @param prefix of the webapp
* @param api the api class for the application
* @param app the application instance
+ * @param wsPrefix the prefix for the webservice api for this app
+ * @return a webapp builder
+ */
+ public static <T> Builder<T> $for(String prefix, Class<T> api, T app, String wsPrefix) {
+ return new Builder<T>(prefix, api, app, wsPrefix);
+ }
+
+ /**
+ * Create a new webapp builder.
+ * @see WebApps for a complete example
+ * @param <T> application (holding the embedded webapp) type
+ * @param prefix of the webapp
+ * @param api the api class for the application
+ * @param app the application instance
* @return a webapp builder
*/
public static <T> Builder<T> $for(String prefix, Class<T> api, T app) {
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java Tue Dec 13 23:12:33 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -88,13 +88,11 @@ public class AllApplicationsPage extends
.tbody();
for (Entry<ApplicationId, Application> entry : this.nmContext
.getApplications().entrySet()) {
- ApplicationId appId = entry.getKey();
- Application app = entry.getValue();
- String appIdStr = ConverterUtils.toString(appId);
+ AppInfo info = new AppInfo(entry.getValue());
tableBody
.tr()
- .td().a(url("application", appIdStr), appIdStr)._()
- .td()._(app.getApplicationState())
+ .td().a(url("application", info.getId()), info.getId())._()
+ .td()._(info.getState())
._()
._();
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java Tue Dec 13 23:12:33 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -83,17 +83,14 @@ public class AllContainersPage extends N
._().tbody();
for (Entry<ContainerId, Container> entry : this.nmContext
.getContainers().entrySet()) {
- ContainerId containerId = entry.getKey();
- Container container = entry.getValue();
- String containerIdStr = ConverterUtils.toString(containerId);
+ ContainerInfo info = new ContainerInfo(this.nmContext, entry.getValue());
tableBody
.tr()
- .td().a(url("container", containerIdStr), containerIdStr)
+ .td().a(url("container", info.getId()), info.getId())
._()
- .td()._(container.getContainerState())._()
+ .td()._(info.getState())._()
.td()
- .a(url("containerlogs", containerIdStr, container.getUser()),
- "logs")._()
+ .a(url(info.getShortLogLink()), "logs")._()
._();
}
tableBody._()._()._();
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java Tue Dec 13 23:12:33 2011
@@ -23,19 +23,16 @@ import static org.apache.hadoop.yarn.web
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
-import java.util.Map;
-
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -81,15 +78,14 @@ public class ApplicationPage extends NMV
ConverterUtils.toApplicationId(this.recordFactory,
$(APPLICATION_ID));
Application app = this.nmContext.getApplications().get(applicationID);
- Map<ContainerId, Container> containers = app.getContainers();
+ AppInfo info = new AppInfo(app);
info("Application's information")
- ._("ApplicationId", ConverterUtils.toString(app.getAppId()))
- ._("ApplicationState", app.getApplicationState().toString())
- ._("User", app.getUser());
+ ._("ApplicationId", info.getId())
+ ._("ApplicationState", info.getState())
+ ._("User", info.getUser());
TABLE<Hamlet> containersListBody = html._(InfoBlock.class)
.table("#containers");
- for (ContainerId containerId : containers.keySet()) {
- String containerIdStr = ConverterUtils.toString(containerId);
+ for (String containerIdStr : info.getContainers()) {
containersListBody
.tr().td()
.a(url("container", containerIdStr), containerIdStr)
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java Tue Dec 13 23:12:33 2011
@@ -18,18 +18,16 @@
package org.apache.hadoop.yarn.server.nodemanager.webapp;
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -77,21 +75,16 @@ public class ContainerPage extends NMVie
+ "please go back to the previous page and retry.")._();
return;
}
- ContainerStatus containerData = container.cloneAndGetContainerStatus();
- int exitCode = containerData.getExitStatus();
- String exiStatus =
- (exitCode == YarnConfiguration.INVALID_CONTAINER_EXIT_STATUS) ?
- "N/A" : String.valueOf(exitCode);
+ ContainerInfo info = new ContainerInfo(this.nmContext, container);
+
info("Container information")
- ._("ContainerID", $(CONTAINER_ID))
- ._("ContainerState", container.getContainerState())
- ._("ExitStatus", exiStatus)
- ._("Diagnostics", containerData.getDiagnostics())
- ._("User", container.getUser())
- ._("TotalMemoryNeeded",
- container.getLaunchContext().getResource().getMemory())
- ._("logs", ujoin("containerlogs", $(CONTAINER_ID), container.getUser()),
- "Link to logs");
+ ._("ContainerID", info.getId())
+ ._("ContainerState", info.getState())
+ ._("ExitStatus", info.getExitStatus())
+ ._("Diagnostics", info.getDiagnostics())
+ ._("User", info.getUser())
+ ._("TotalMemoryNeeded", info.getMemoryNeeded())
+ ._("logs", info.getShortLogLink(), "Link to logs");
html._(InfoBlock.class);
}
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java Tue Dec 13 23:12:33 2011
@@ -23,10 +23,10 @@ import static org.apache.hadoop.yarn.web
import java.util.Date;
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.util.YarnVersionInfo;
+import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.server.nodemanager.Context;
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NodeInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -36,6 +36,8 @@ import com.google.inject.Inject;
public class NodePage extends NMView {
+ private static final long BYTES_IN_MB = 1024 * 1024;
+
@Override
protected void commonPreHead(HTML<_> html) {
super.commonPreHead(html);
@@ -60,21 +62,22 @@ public class NodePage extends NMView {
@Override
protected void render(Block html) {
+ NodeInfo info = new NodeInfo(this.context, this.resourceView);
info("NodeManager information")
._("Total Vmem allocated for Containers",
- this.resourceView.getVmemAllocatedForContainers() + "bytes")
+ StringUtils.byteDesc(info.getTotalVmemAllocated() * BYTES_IN_MB))
._("Total Pmem allocated for Container",
- this.resourceView.getPmemAllocatedForContainers() + "bytes")
+ StringUtils.byteDesc(info.getTotalPmemAllocated() * BYTES_IN_MB))
._("NodeHealthyStatus",
- this.context.getNodeHealthStatus().getIsNodeHealthy())
+ info.getHealthStatus())
._("LastNodeHealthTime", new Date(
- this.context.getNodeHealthStatus().getLastHealthReportTime()))
+ info.getLastNodeUpdateTime()))
._("NodeHealthReport",
- this.context.getNodeHealthStatus().getHealthReport())
- ._("Node Manager Version:", YarnVersionInfo.getBuildVersion() +
- " on " + YarnVersionInfo.getDate())
- ._("Hadoop Version:", VersionInfo.getBuildVersion() +
- " on " + VersionInfo.getDate());
+ info.getHealthReport())
+ ._("Node Manager Version:", info.getNMBuildVersion() +
+ " on " + info.getNMVersionBuiltOn())
+ ._("Hadoop Version:", info.getHadoopBuildVersion() +
+ " on " + info.getHadoopVersionBuiltOn());
html._(InfoBlock.class);
}
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java Tue Dec 13 23:12:33 2011
@@ -30,9 +30,10 @@ import org.apache.hadoop.yarn.server.nod
import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.service.AbstractService;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
import org.apache.hadoop.yarn.webapp.WebApps;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
public class WebServer extends AbstractService {
@@ -61,8 +62,9 @@ public class WebServer extends AbstractS
YarnConfiguration.DEFAULT_NM_WEBAPP_ADDRESS);
LOG.info("Instantiating NMWebApp at " + bindAddress);
try {
- this.webApp = WebApps.$for("node", Context.class, this.nmContext).at(
- bindAddress).with(getConfig()).start(this.nmWebApp);
+ this.webApp =
+ WebApps.$for("node", Context.class, this.nmContext, "ws")
+ .at(bindAddress).with(getConfig()).start(this.nmWebApp);
} catch (Exception e) {
String msg = "NMWebapps failed to start.";
LOG.error(msg, e);
@@ -95,6 +97,9 @@ public class WebServer extends AbstractS
@Override
public void setup() {
+ bind(NMWebServices.class);
+ bind(GenericExceptionHandler.class);
+ bind(JAXBContextResolver.class);
bind(ResourceView.class).toInstance(this.resourceView);
bind(ApplicationACLsManager.class).toInstance(this.aclsManager);
bind(LocalDirsHandlerService.class).toInstance(dirsHandler);
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java Tue Dec 13 23:12:33 2011
@@ -412,7 +412,7 @@ public class ResourceManager extends Com
protected void startWepApp() {
Builder<ApplicationMasterService> builder =
- WebApps.$for("cluster", masterService).at(
+ WebApps.$for("cluster", ApplicationMasterService.class, masterService, "ws").at(
this.conf.get(YarnConfiguration.RM_WEBAPP_ADDRESS,
YarnConfiguration.DEFAULT_RM_WEBAPP_ADDRESS));
if(YarnConfiguration.getRMWebAppHostAndPort(conf).
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java Tue Dec 13 23:12:33 2011
@@ -18,10 +18,9 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
-import org.apache.hadoop.util.VersionInfo;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterInfo;
import org.apache.hadoop.yarn.util.Times;
-import org.apache.hadoop.yarn.util.YarnVersionInfo;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
@@ -30,25 +29,25 @@ import com.google.inject.Inject;
public class AboutBlock extends HtmlBlock {
final ResourceManager rm;
- @Inject
+ @Inject
AboutBlock(ResourceManager rm, ViewContext ctx) {
super(ctx);
this.rm = rm;
}
-
+
@Override
protected void render(Block html) {
html._(MetricsOverviewTable.class);
- long ts = ResourceManager.clusterTimeStamp;
ResourceManager rm = getInstance(ResourceManager.class);
+ ClusterInfo cinfo = new ClusterInfo(rm);
info("Cluster overview").
- _("Cluster ID:", ts).
- _("ResourceManager state:", rm.getServiceState()).
- _("ResourceManager started on:", Times.format(ts)).
- _("ResourceManager version:", YarnVersionInfo.getBuildVersion() +
- " on " + YarnVersionInfo.getDate()).
- _("Hadoop version:", VersionInfo.getBuildVersion() +
- " on " + VersionInfo.getDate());
+ _("Cluster ID:", cinfo.getClusterId()).
+ _("ResourceManager state:", cinfo.getState()).
+ _("ResourceManager started on:", Times.format(cinfo.getStartedOn())).
+ _("ResourceManager version:", cinfo.getRMBuildVersion() +
+ " on " + cinfo.getRMVersionBuiltOn()).
+ _("Hadoop version:", cinfo.getHadoopBuildVersion() +
+ " on " + cinfo.getHadoopVersionBuiltOn());
html._(InfoBlock.class);
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java Tue Dec 13 23:12:33 2011
@@ -23,6 +23,7 @@ import static org.apache.hadoop.yarn.web
import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -56,23 +57,18 @@ class AppsBlock extends HtmlBlock {
tbody();
int i = 0;
for (RMApp app : list.apps.values()) {
- String appId = app.getApplicationId().toString();
- String trackingUrl = app.getTrackingUrl();
- boolean trackingUrlIsNotReady = trackingUrl == null || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
- String ui = trackingUrlIsNotReady ? "UNASSIGNED" :
- (app.getFinishTime() == 0 ?
- "ApplicationMaster" : "History");
- String percent = String.format("%.1f", app.getProgress() * 100);
+ AppInfo appInfo = new AppInfo(app, true);
+ String percent = String.format("%.1f", appInfo.getProgress());
tbody.
tr().
td().
- br().$title(String.valueOf(app.getApplicationId().getId()))._(). // for sorting
- a(url("app", appId), appId)._().
- td(app.getUser().toString()).
- td(app.getName().toString()).
- td(app.getQueue().toString()).
- td(app.getState().toString()).
- td(app.getFinalApplicationStatus().toString()).
+ br().$title(appInfo.getAppIdNum())._(). // for sorting
+ a(url("app", appInfo.getAppId()), appInfo.getAppId())._().
+ td(appInfo.getUser()).
+ td(appInfo.getName()).
+ td(appInfo.getQueue()).
+ td(appInfo.getState()).
+ td(appInfo.getFinalStatus()).
td().
br().$title(percent)._(). // for sorting
div(_PROGRESSBAR).
@@ -80,9 +76,9 @@ class AppsBlock extends HtmlBlock {
div(_PROGRESSBAR_VALUE).
$style(join("width:", percent, '%'))._()._()._().
td().
- a(trackingUrlIsNotReady ?
- "#" : join("http://", trackingUrl), ui)._().
- td(app.getDiagnostics().toString())._();
+ a(!appInfo.isTrackingUrlReady()?
+ "#" : appInfo.getTrackingUrlPretty(), appInfo.getTrackingUI())._().
+ td(appInfo.getNote())._();
if (list.rendering != Render.HTML && ++i >= 20) break;
}
tbody._()._();
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java Tue Dec 13 23:12:33 2011
@@ -31,6 +31,7 @@ import java.util.concurrent.ConcurrentMa
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.webapp.Controller.RequestContext;
import org.apache.hadoop.yarn.webapp.ToJSON;
import org.apache.hadoop.yarn.webapp.view.JQueryUI.Render;
@@ -54,31 +55,27 @@ class AppsList implements ToJSON {
out.append('[');
boolean first = true;
for (RMApp app : apps.values()) {
+ AppInfo appInfo = new AppInfo(app, false);
if (first) {
first = false;
} else {
out.append(",\n");
}
- String appID = app.getApplicationId().toString();
- String trackingUrl = app.getTrackingUrl();
- boolean trackingUrlIsNotReady = trackingUrl == null
- || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
- String ui = trackingUrlIsNotReady ? "UNASSIGNED"
- : (app.getFinishTime() == 0 ? "ApplicationMaster" : "History");
out.append("[\"");
- appendSortable(out, app.getApplicationId().getId());
- appendLink(out, appID, rc.prefix(), "app", appID).append(_SEP).
- append(escapeHtml(app.getUser().toString())).append(_SEP).
- append(escapeHtml(app.getName().toString())).append(_SEP).
- append(escapeHtml(app.getQueue())).append(_SEP).
- append(app.getState().toString()).append(_SEP).
- append(app.getFinalApplicationStatus().toString()).append(_SEP);
- appendProgressBar(out, app.getProgress()).append(_SEP);
- appendLink(out, ui, rc.prefix(),
- trackingUrlIsNotReady ?
- "#" : "http://", trackingUrl).
+ appendSortable(out, appInfo.getAppIdNum());
+ appendLink(out, appInfo.getAppId(), rc.prefix(), "app",
+ appInfo.getAppId()).append(_SEP).
+ append(escapeHtml(appInfo.getUser())).append(_SEP).
+ append(escapeHtml(appInfo.getName())).append(_SEP).
+ append(escapeHtml(appInfo.getQueue())).append(_SEP).
+ append(appInfo.getState()).append(_SEP).
+ append(appInfo.getFinalStatus()).append(_SEP);
+ appendProgressBar(out, appInfo.getProgress()).append(_SEP);
+ appendLink(out, appInfo.getTrackingUI(), rc.prefix(),
+ !appInfo.isTrackingUrlReady() ?
+ "#" : appInfo.getTrackingUrlPretty()).
append(_SEP).append(escapeJavaScript(escapeHtml(
- app.getDiagnostics().toString()))).
+ appInfo.getNote()))).
append("\"]");
}
out.append(']');
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java Tue Dec 13 23:12:33 2011
@@ -18,19 +18,23 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
-import com.google.inject.Inject;
-import com.google.inject.servlet.RequestScoped;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
+import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.ParentQueue;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CSQueue;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.CapacitySchedulerQueueInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.LI;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import com.google.inject.Inject;
+import com.google.inject.servlet.RequestScoped;
class CapacitySchedulerPage extends RmView {
static final String _Q = ".ui-state-default.ui-corner-all";
@@ -47,22 +51,21 @@ class CapacitySchedulerPage extends RmVi
public static class QueueBlock extends HtmlBlock {
final Parent parent;
+ final CapacitySchedulerInfo sinfo;
@Inject QueueBlock(Parent parent) {
this.parent = parent;
+ sinfo = new CapacitySchedulerInfo(parent.queue);
}
@Override
public void render(Block html) {
UL<Hamlet> ul = html.ul();
- CSQueue parentQueue = parent.queue;
- for (CSQueue queue : parentQueue.getChildQueues()) {
- float used = queue.getUsedCapacity();
- float set = queue.getCapacity();
+ for (CapacitySchedulerQueueInfo info : sinfo.getSubQueues()) {
+ float used = info.getUsedCapacity() / 100;
+ float set = info.getCapacity() / 100;
float delta = Math.abs(set - used) + 0.001f;
- float max = queue.getMaximumCapacity();
- if (max < EPSILON || max > 1f) max = 1f;
- //String absMaxPct = percent(queue.getAbsoluteMaximumCapacity());
+ float max = info.getMaxCapacity() / 100;
LI<UL<Hamlet>> li = ul.
li().
a(_Q).$style(width(max * WIDTH_F)).
@@ -72,14 +75,16 @@ class CapacitySchedulerPage extends RmVi
span().$style(join(width(delta/max), ';',
used > set ? OVER : UNDER, ';',
used > set ? left(set/max) : left(used/max)))._('.')._().
- span(".q", queue.getQueuePath().substring(5))._();
- if (queue instanceof ParentQueue) {
- parent.queue = queue;
+ span(".q", info.getQueuePath().substring(5))._();
+ if (info.getQueue() instanceof ParentQueue) {
+ // this could be optimized better
+ parent.queue = info.getQueue();
li.
_(QueueBlock.class);
}
li._();
}
+
ul._();
}
}
@@ -111,8 +116,9 @@ class CapacitySchedulerPage extends RmVi
} else {
CSQueue root = cs.getRootQueue();
parent.queue = root;
- float used = root.getUsedCapacity();
- float set = root.getCapacity();
+ CapacitySchedulerInfo sinfo = new CapacitySchedulerInfo(parent.queue);
+ float used = sinfo.getUsedCapacity() / 100;
+ float set = sinfo.getCapacity() / 100;
float delta = Math.abs(set - used) + 0.001f;
ul.
li().
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java Tue Dec 13 23:12:33 2011
@@ -18,22 +18,20 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
-import com.google.inject.Inject;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fifo.FifoScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.FifoSchedulerInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.UL;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import org.apache.hadoop.yarn.api.records.QueueInfo;
-import org.apache.hadoop.yarn.api.records.QueueState;
-import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
-import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import com.google.inject.Inject;
class DefaultSchedulerPage extends RmView {
static final String _Q = ".ui-state-default.ui-corner-all";
@@ -44,66 +42,35 @@ class DefaultSchedulerPage extends RmVie
static final float EPSILON = 1e-8f;
static class QueueInfoBlock extends HtmlBlock {
- final RMContext rmContext;
- final FifoScheduler fs;
- final String qName;
- final QueueInfo qInfo;
+ final FifoSchedulerInfo sinfo;
@Inject QueueInfoBlock(RMContext context, ViewContext ctx, ResourceManager rm) {
super(ctx);
- this.rmContext = context;
-
- fs = (FifoScheduler) rm.getResourceScheduler();
- qName = fs.getQueueInfo("",false,false).getQueueName();
- qInfo = fs.getQueueInfo(qName,true,true);
+ sinfo = new FifoSchedulerInfo(rm);
}
@Override public void render(Block html) {
- String minmemoryresource =
- Integer.toString(fs.getMinimumResourceCapability().getMemory());
- String maxmemoryresource =
- Integer.toString(fs.getMaximumResourceCapability().getMemory());
- String qstate = (qInfo.getQueueState() == QueueState.RUNNING) ?
- "Running" :
- (qInfo.getQueueState() == QueueState.STOPPED) ?
- "Stopped" : "Unknown";
-
- int usedNodeMem = 0;
- int availNodeMem = 0;
- int totNodeMem = 0;
- int nodeContainers = 0;
-
- for (RMNode ni : this.rmContext.getRMNodes().values()) {
- SchedulerNodeReport report = fs.getNodeReport(ni.getNodeID());
- usedNodeMem += report.getUsedResource().getMemory();
- availNodeMem += report.getAvailableResource().getMemory();
- totNodeMem += ni.getTotalCapability().getMemory();
- nodeContainers += fs.getNodeReport(ni.getNodeID()).getNumContainers();
- }
-
- info("\'" + qName + "\' Queue Status").
- _("Queue State:" , qstate).
- _("Minimum Queue Memory Capacity:" , minmemoryresource).
- _("Maximum Queue Memory Capacity:" , maxmemoryresource).
- _("Number of Nodes:" , Integer.toString(this.rmContext.getRMNodes().size())).
- _("Used Node Capacity:" , Integer.toString(usedNodeMem)).
- _("Available Node Capacity:" , Integer.toString(availNodeMem)).
- _("Total Node Capacity:" , Integer.toString(totNodeMem)).
- _("Number of Node Containers:" , Integer.toString(nodeContainers));
+ info("\'" + sinfo.getQueueName() + "\' Queue Status").
+ _("Queue State:" , sinfo.getState()).
+ _("Minimum Queue Memory Capacity:" , Integer.toString(sinfo.getMinQueueMemoryCapacity())).
+ _("Maximum Queue Memory Capacity:" , Integer.toString(sinfo.getMaxQueueMemoryCapacity())).
+ _("Number of Nodes:" , Integer.toString(sinfo.getNumNodes())).
+ _("Used Node Capacity:" , Integer.toString(sinfo.getUsedNodeCapacity())).
+ _("Available Node Capacity:" , Integer.toString(sinfo.getAvailNodeCapacity())).
+ _("Total Node Capacity:" , Integer.toString(sinfo.getTotalNodeCapacity())).
+ _("Number of Node Containers:" , Integer.toString(sinfo.getNumContainers()));
html._(InfoBlock.class);
}
}
static class QueuesBlock extends HtmlBlock {
+ final FifoSchedulerInfo sinfo;
final FifoScheduler fs;
- final String qName;
- final QueueInfo qInfo;
@Inject QueuesBlock(ResourceManager rm) {
+ sinfo = new FifoSchedulerInfo(rm);
fs = (FifoScheduler) rm.getResourceScheduler();
- qName = fs.getQueueInfo("",false,false).getQueueName();
- qInfo = fs.getQueueInfo(qName,false,false);
}
@Override
@@ -123,8 +90,8 @@ class DefaultSchedulerPage extends RmVie
span().$style(Q_END)._("100% ")._().
span(".q", "default")._()._();
} else {
- float used = qInfo.getCurrentCapacity();
- float set = qInfo.getCapacity();
+ float used = sinfo.getUsedCapacity();
+ float set = sinfo.getCapacity();
float delta = Math.abs(set - used) + 0.001f;
ul.
li().
@@ -133,7 +100,7 @@ class DefaultSchedulerPage extends RmVie
span().$style(Q_END)._("100%")._().
span().$style(join(width(delta), ';', used > set ? OVER : UNDER,
';', used > set ? left(set) : left(used)))._(".")._().
- span(".q", qName)._().
+ span(".q", sinfo.getQueueName())._().
_(QueueInfoBlock.class)._();
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java Tue Dec 13 23:12:33 2011
@@ -19,11 +19,11 @@
package org.apache.hadoop.yarn.server.resourcemanager.webapp;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.server.resourcemanager.ClusterMetrics;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.QueueMetrics;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.ClusterMetricsInfo;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.UserMetricsInfo;
+
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -36,12 +36,12 @@ import com.google.inject.Inject;
* current user is using on the cluster.
*/
public class MetricsOverviewTable extends HtmlBlock {
- private static final long BYTES_IN_GB = 1024 * 1024 * 1024;
-
+ private static final long BYTES_IN_MB = 1024 * 1024;
+
private final RMContext rmContext;
private final ResourceManager rm;
- @Inject
+ @Inject
MetricsOverviewTable(RMContext context, ResourceManager rm, ViewContext ctx) {
super(ctx);
this.rmContext = context;
@@ -55,22 +55,7 @@ public class MetricsOverviewTable extend
//CSS in the correct spot
html.style(".metrics {margin-bottom:5px}");
- ResourceScheduler rs = rm.getResourceScheduler();
- QueueMetrics metrics = rs.getRootQueueMetrics();
- ClusterMetrics clusterMetrics = ClusterMetrics.getMetrics();
-
- int appsSubmitted = metrics.getAppsSubmitted();
- int reservedGB = metrics.getReservedGB();
- int availableGB = metrics.getAvailableGB();
- int allocatedGB = metrics.getAllocatedGB();
- int containersAllocated = metrics.getAllocatedContainers();
- int totalGB = availableGB + reservedGB + allocatedGB;
-
- int totalNodes = clusterMetrics.getNumNMs();
- int lostNodes = clusterMetrics.getNumLostNMs();
- int unhealthyNodes = clusterMetrics.getUnhealthyNMs();
- int decommissionedNodes = clusterMetrics.getNumDecommisionedNMs();
- int rebootedNodes = clusterMetrics.getNumRebootedNMs();
+ ClusterMetricsInfo clusterMetrics = new ClusterMetricsInfo(this.rm, this.rmContext);
DIV<Hamlet> div = html.div().$class("metrics");
@@ -92,30 +77,23 @@ public class MetricsOverviewTable extend
_().
tbody().$class("ui-widget-content").
tr().
- td(String.valueOf(appsSubmitted)).
- td(String.valueOf(containersAllocated)).
- td(StringUtils.byteDesc(allocatedGB * BYTES_IN_GB)).
- td(StringUtils.byteDesc(totalGB * BYTES_IN_GB)).
- td(StringUtils.byteDesc(reservedGB * BYTES_IN_GB)).
- td().a(url("nodes"),String.valueOf(totalNodes))._().
- td().a(url("nodes/decommissioned"),String.valueOf(decommissionedNodes))._().
- td().a(url("nodes/lost"),String.valueOf(lostNodes))._().
- td().a(url("nodes/unhealthy"),String.valueOf(unhealthyNodes))._().
- td().a(url("nodes/rebooted"),String.valueOf(rebootedNodes))._().
+ td(String.valueOf(clusterMetrics.getAppsSubmitted())).
+ td(String.valueOf(clusterMetrics.getContainersAllocated())).
+ td(StringUtils.byteDesc(clusterMetrics.getAllocatedMB() * BYTES_IN_MB)).
+ td(StringUtils.byteDesc(clusterMetrics.getTotalMB() * BYTES_IN_MB)).
+ td(StringUtils.byteDesc(clusterMetrics.getReservedMB() * BYTES_IN_MB)).
+ td().a(url("nodes"),String.valueOf(clusterMetrics.getTotalNodes()))._().
+ td().a(url("nodes/decommissioned"),String.valueOf(clusterMetrics.getDecommissionedNodes()))._().
+ td().a(url("nodes/lost"),String.valueOf(clusterMetrics.getLostNodes()))._().
+ td().a(url("nodes/unhealthy"),String.valueOf(clusterMetrics.getUnhealthyNodes()))._().
+ td().a(url("nodes/rebooted"),String.valueOf(clusterMetrics.getRebootedNodes()))._().
_().
_()._();
-
+
String user = request().getRemoteUser();
if (user != null) {
- QueueMetrics userMetrics = metrics.getUserMetrics(user);
- if(userMetrics != null) {
- int myAppsSubmitted = userMetrics.getAppsSubmitted();
- int myRunningContainers = userMetrics.getAllocatedContainers();
- int myPendingContainers = userMetrics.getPendingContainers();
- int myReservedContainers = userMetrics.getReservedContainers();
- int myReservedGB = userMetrics.getReservedGB();
- int myPendingGB = userMetrics.getPendingGB();
- int myAllocatedGB = userMetrics.getAllocatedGB();
+ UserMetricsInfo userMetrics = new UserMetricsInfo(this.rm, this.rmContext, user);
+ if (userMetrics.metricsAvailable()) {
div.table("#usermetricsoverview").
thead().$class("ui-widget-header").
tr().
@@ -130,13 +108,13 @@ public class MetricsOverviewTable extend
_().
tbody().$class("ui-widget-content").
tr().
- td(String.valueOf(myAppsSubmitted)).
- td(String.valueOf(myRunningContainers)).
- td(String.valueOf(myPendingContainers)).
- td(String.valueOf(myReservedContainers)).
- td(StringUtils.byteDesc(myAllocatedGB * BYTES_IN_GB)).
- td(StringUtils.byteDesc(myPendingGB * BYTES_IN_GB)).
- td(StringUtils.byteDesc(myReservedGB * BYTES_IN_GB)).
+ td(String.valueOf(userMetrics.getAppsSubmitted())).
+ td(String.valueOf(userMetrics.getRunningContainers())).
+ td(String.valueOf(userMetrics.getPendingContainers())).
+ td(String.valueOf(userMetrics.getReservedContainers())).
+ td(StringUtils.byteDesc(userMetrics.getAllocatedMB() * BYTES_IN_MB)).
+ td(StringUtils.byteDesc(userMetrics.getPendingMB() * BYTES_IN_MB)).
+ td(StringUtils.byteDesc(userMetrics.getReservedMB() * BYTES_IN_MB)).
_().
_()._();
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java Tue Dec 13 23:12:33 2011
@@ -25,14 +25,12 @@ import static org.apache.hadoop.yarn.web
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.NodeHealthStatus;
-import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeState;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
-import org.apache.hadoop.yarn.server.resourcemanager.scheduler.SchedulerNodeReport;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.NodeInfo;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
@@ -45,9 +43,9 @@ import com.google.inject.Inject;
class NodesPage extends RmView {
static class NodesBlock extends HtmlBlock {
- private static final long BYTES_IN_MB = 1024 * 1024;
final RMContext rmContext;
final ResourceManager rm;
+ private static final long BYTES_IN_MB = 1024 * 1024;
@Inject
NodesBlock(RMContext context, ResourceManager rm, ViewContext ctx) {
@@ -59,7 +57,7 @@ class NodesPage extends RmView {
@Override
protected void render(Block html) {
html._(MetricsOverviewTable.class);
-
+
ResourceScheduler sched = rm.getResourceScheduler();
String type = $(NODE_STATE);
TBODY<TABLE<Hamlet>> tbody = html.table("#nodes").
@@ -88,27 +86,18 @@ class NodesPage extends RmView {
continue;
}
}
- NodeId id = ni.getNodeID();
- SchedulerNodeReport report = sched.getNodeReport(id);
- int numContainers = 0;
- int usedMemory = 0;
- int availableMemory = 0;
- if(report != null) {
- numContainers = report.getNumContainers();
- usedMemory = report.getUsedResource().getMemory();
- availableMemory = report.getAvailableResource().getMemory();
- }
-
- NodeHealthStatus health = ni.getNodeHealthStatus();
+ NodeInfo info = new NodeInfo(ni, sched);
+ int usedMemory = (int)info.getUsedMemory();
+ int availableMemory = (int)info.getAvailableMemory();
tbody.tr().
- td(ni.getRackName()).
- td(String.valueOf(ni.getState())).
- td(String.valueOf(ni.getNodeID().toString())).
- td().a("http://" + ni.getHttpAddress(), ni.getHttpAddress())._().
- td(health.getIsNodeHealthy() ? "Healthy" : "Unhealthy").
- td(Times.format(health.getLastHealthReportTime())).
- td(String.valueOf(health.getHealthReport())).
- td(String.valueOf(numContainers)).
+ td(info.getRack()).
+ td(info.getState()).
+ td(info.getNodeId()).
+ td().a("http://" + info.getNodeHTTPAddress(), info.getNodeHTTPAddress())._().
+ td(info.getHealthStatus()).
+ td(Times.format(info.getLastHealthUpdate())).
+ td(info.getHealthReport()).
+ td(String.valueOf(info.getNumContainers())).
td().br().$title(String.valueOf(usedMemory))._().
_(StringUtils.byteDesc(usedMemory * BYTES_IN_MB))._().
td().br().$title(String.valueOf(usedMemory))._().
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java Tue Dec 13 23:12:33 2011
@@ -23,6 +23,7 @@ import static org.apache.hadoop.yarn.uti
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
/**
@@ -41,6 +42,9 @@ public class RMWebApp extends WebApp {
@Override
public void setup() {
+ bind(JAXBContextResolver.class);
+ bind(RMWebServices.class);
+ bind(GenericExceptionHandler.class);
if (rm != null) {
bind(ResourceManager.class).toInstance(rm);
bind(RMContext.class).toInstance(rm.getRMContext());
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java Tue Dec 13 23:12:33 2011
@@ -26,17 +26,16 @@ import javax.servlet.http.HttpServletRes
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationAccessType;
-import org.apache.hadoop.yarn.api.records.Container;
+import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.server.resourcemanager.RMContext;
import org.apache.hadoop.yarn.server.resourcemanager.ResourceManager;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.RMApp;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
+import org.apache.hadoop.yarn.server.resourcemanager.webapp.dao.AppInfo;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.util.Apps;
-import org.apache.hadoop.yarn.util.ConverterUtils;
import org.apache.hadoop.yarn.util.Times;
import org.apache.hadoop.yarn.webapp.Controller;
import org.apache.hadoop.yarn.webapp.ResponseInfo;
@@ -73,13 +72,14 @@ public class RmController extends Contro
}
ApplicationId appID = Apps.toAppID(aid);
RMContext context = getInstance(RMContext.class);
- RMApp app = context.getRMApps().get(appID);
- if (app == null) {
+ RMApp rmApp = context.getRMApps().get(appID);
+ if (rmApp == null) {
// TODO: handle redirect to jobhistory server
setStatus(HttpServletResponse.SC_NOT_FOUND);
setTitle("Application not found: "+ aid);
return;
}
+ AppInfo app = new AppInfo(rmApp, true);
// Check for the authorization.
String remoteUser = request().getRemoteUser();
@@ -98,32 +98,22 @@ public class RmController extends Contro
}
setTitle(join("Application ", aid));
- String trackingUrl = app.getTrackingUrl();
- boolean trackingUrlIsNotReady = trackingUrl == null
- || trackingUrl.isEmpty() || "N/A".equalsIgnoreCase(trackingUrl);
- String ui = trackingUrlIsNotReady ? "UNASSIGNED" :
- (app.getFinishTime() == 0 ? "ApplicationMaster" : "History");
ResponseInfo info = info("Application Overview").
_("User:", app.getUser()).
_("Name:", app.getName()).
- _("State:", app.getState().toString()).
- _("FinalStatus:", app.getFinalApplicationStatus().toString()).
+ _("State:", app.getState()).
+ _("FinalStatus:", app.getFinalStatus()).
_("Started:", Times.format(app.getStartTime())).
_("Elapsed:", StringUtils.formatTime(
Times.elapsed(app.getStartTime(), app.getFinishTime()))).
- _("Tracking URL:", trackingUrlIsNotReady ?
- "#" : join("http://", trackingUrl), ui).
- _("Diagnostics:", app.getDiagnostics());
- Container masterContainer = app.getCurrentAppAttempt()
- .getMasterContainer();
- if (masterContainer != null) {
- String url = join("http://", masterContainer.getNodeHttpAddress(),
- "/node", "/containerlogs/",
- ConverterUtils.toString(masterContainer.getId()));
- info._("AM container logs:", url, url);
+ _("Tracking URL:", !app.isTrackingUrlReady() ?
+ "#" : app.getTrackingUrlPretty(), app.getTrackingUI()).
+ _("Diagnostics:", app.getNote());
+ if (app.amContainerLogsExist()) {
+ info._("AM container logs:", app.getAMContainerLogs(), app.getAMContainerLogs());
} else {
- info._("AM container logs:", "AM not yet registered with RM");
+ info._("AM container logs:", "");
}
render(AppPage.class);
}
Modified: hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java?rev=1213978&r1=1213977&r2=1213978&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java (original)
+++ hadoop/common/branches/branch-0.23/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java Tue Dec 13 23:12:33 2011
@@ -30,7 +30,9 @@ import org.apache.hadoop.yarn.api.record
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
+import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
+import org.apache.hadoop.yarn.event.Dispatcher;
import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.AMLauncherEvent;
import org.apache.hadoop.yarn.server.resourcemanager.amlauncher.ApplicationMasterLauncher;
import org.apache.hadoop.yarn.server.resourcemanager.recovery.StoreFactory;
@@ -40,12 +42,16 @@ import org.apache.hadoop.yarn.server.res
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptEventType;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.RMAppAttemptState;
import org.apache.hadoop.yarn.server.resourcemanager.rmapp.attempt.event.RMAppAttemptLaunchFailedEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNode;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEvent;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeEventType;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeImpl;
+import org.apache.hadoop.yarn.server.resourcemanager.rmnode.RMNodeState;
import org.apache.hadoop.yarn.util.Records;
import org.apache.log4j.Level;
import org.apache.log4j.LogManager;
import org.apache.log4j.Logger;
-
public class MockRM extends ResourceManager {
public MockRM() {
@@ -59,48 +65,50 @@ public class MockRM extends ResourceMana
rootLogger.setLevel(Level.DEBUG);
}
- public void waitForState(ApplicationId appId, RMAppState finalState)
+ public void waitForState(ApplicationId appId, RMAppState finalState)
throws Exception {
RMApp app = getRMContext().getRMApps().get(appId);
Assert.assertNotNull("app shouldn't be null", app);
int timeoutSecs = 0;
- while (!finalState.equals(app.getState()) &&
- timeoutSecs++ < 20) {
- System.out.println("App State is : " + app.getState() +
- " Waiting for state : " + finalState);
+ while (!finalState.equals(app.getState()) && timeoutSecs++ < 20) {
+ System.out.println("App State is : " + app.getState()
+ + " Waiting for state : " + finalState);
Thread.sleep(500);
}
System.out.println("App State is : " + app.getState());
- Assert.assertEquals("App state is not correct (timedout)",
- finalState, app.getState());
+ Assert.assertEquals("App state is not correct (timedout)", finalState,
+ app.getState());
}
-
- // get new application id
+
+ // get new application id
public GetNewApplicationResponse getNewAppId() throws Exception {
ClientRMProtocol client = getClientRMService();
- return client.getNewApplication(Records.newRecord(GetNewApplicationRequest.class));
+ return client.getNewApplication(Records
+ .newRecord(GetNewApplicationRequest.class));
}
- //client
+ // client
public RMApp submitApp(int masterMemory) throws Exception {
ClientRMProtocol client = getClientRMService();
- GetNewApplicationResponse resp = client.getNewApplication(Records.newRecord(GetNewApplicationRequest.class));
+ GetNewApplicationResponse resp = client.getNewApplication(Records
+ .newRecord(GetNewApplicationRequest.class));
ApplicationId appId = resp.getApplicationId();
-
- SubmitApplicationRequest req = Records.newRecord(SubmitApplicationRequest.class);
- ApplicationSubmissionContext sub =
- Records.newRecord(ApplicationSubmissionContext.class);
+
+ SubmitApplicationRequest req = Records
+ .newRecord(SubmitApplicationRequest.class);
+ ApplicationSubmissionContext sub = Records
+ .newRecord(ApplicationSubmissionContext.class);
sub.setApplicationId(appId);
sub.setApplicationName("");
sub.setUser("");
- ContainerLaunchContext clc =
- Records.newRecord(ContainerLaunchContext.class);
- Resource capability = Records.newRecord(Resource.class);
+ ContainerLaunchContext clc = Records
+ .newRecord(ContainerLaunchContext.class);
+ Resource capability = Records.newRecord(Resource.class);
capability.setMemory(masterMemory);
clc.setResource(capability);
sub.setAMContainerSpec(clc);
req.setApplicationSubmissionContext(sub);
-
+
client.submitApplication(req);
// make sure app is immediately available after submit
waitForState(appId, RMAppState.ACCEPTED);
@@ -113,28 +121,54 @@ public class MockRM extends ResourceMana
return nm;
}
+ public void sendNodeStarted(MockNM nm) throws Exception {
+ RMNodeImpl node = (RMNodeImpl) getRMContext().getRMNodes().get(
+ nm.getNodeId());
+ node.handle(new RMNodeEvent(nm.getNodeId(), RMNodeEventType.STARTED));
+ }
+
+ public void NMwaitForState(NodeId nodeid, RMNodeState finalState)
+ throws Exception {
+ RMNode node = getRMContext().getRMNodes().get(nodeid);
+ Assert.assertNotNull("node shouldn't be null", node);
+ int timeoutSecs = 0;
+ while (!finalState.equals(node.getState()) && timeoutSecs++ < 20) {
+ System.out.println("Node State is : " + node.getState()
+ + " Waiting for state : " + finalState);
+ Thread.sleep(500);
+ }
+ System.out.println("Node State is : " + node.getState());
+ Assert.assertEquals("Node state is not correct (timedout)", finalState,
+ node.getState());
+ }
+
public void killApp(ApplicationId appId) throws Exception {
ClientRMProtocol client = getClientRMService();
- KillApplicationRequest req = Records.newRecord(KillApplicationRequest.class);
+ KillApplicationRequest req = Records
+ .newRecord(KillApplicationRequest.class);
req.setApplicationId(appId);
client.forceKillApplication(req);
}
- //from AMLauncher
- public MockAM sendAMLaunched(ApplicationAttemptId appAttemptId) throws Exception {
+ // from AMLauncher
+ public MockAM sendAMLaunched(ApplicationAttemptId appAttemptId)
+ throws Exception {
MockAM am = new MockAM(getRMContext(), masterService, appAttemptId);
am.waitForState(RMAppAttemptState.ALLOCATED);
- getRMContext().getDispatcher().getEventHandler().handle(
- new RMAppAttemptEvent(appAttemptId, RMAppAttemptEventType.LAUNCHED));
+ getRMContext()
+ .getDispatcher()
+ .getEventHandler()
+ .handle(
+ new RMAppAttemptEvent(appAttemptId, RMAppAttemptEventType.LAUNCHED));
return am;
}
-
- public void sendAMLaunchFailed(ApplicationAttemptId appAttemptId) throws Exception {
+ public void sendAMLaunchFailed(ApplicationAttemptId appAttemptId)
+ throws Exception {
MockAM am = new MockAM(getRMContext(), masterService, appAttemptId);
am.waitForState(RMAppAttemptState.ALLOCATED);
- getRMContext().getDispatcher().getEventHandler().handle(
- new RMAppAttemptLaunchFailedEvent(appAttemptId, "Failed"));
+ getRMContext().getDispatcher().getEventHandler()
+ .handle(new RMAppAttemptLaunchFailedEvent(appAttemptId, "Failed"));
}
@Override
@@ -143,8 +177,9 @@ public class MockRM extends ResourceMana
rmAppManager, applicationACLsManager) {
@Override
public void start() {
- //override to not start rpc handler
+ // override to not start rpc handler
}
+
@Override
public void stop() {
// don't do anything
@@ -155,11 +190,12 @@ public class MockRM extends ResourceMana
@Override
protected ResourceTrackerService createResourceTrackerService() {
return new ResourceTrackerService(getRMContext(), nodesListManager,
- this.nmLivelinessMonitor, this.containerTokenSecretManager){
+ this.nmLivelinessMonitor, this.containerTokenSecretManager) {
@Override
public void start() {
- //override to not start rpc handler
+ // override to not start rpc handler
}
+
@Override
public void stop() {
// don't do anything
@@ -173,8 +209,9 @@ public class MockRM extends ResourceMana
this.appTokenSecretManager, scheduler) {
@Override
public void start() {
- //override to not start rpc handler
+ // override to not start rpc handler
}
+
@Override
public void stop() {
// don't do anything
@@ -184,17 +221,18 @@ public class MockRM extends ResourceMana
@Override
protected ApplicationMasterLauncher createAMLauncher() {
- return new ApplicationMasterLauncher(
- this.appTokenSecretManager, this.clientToAMSecretManager,
- getRMContext()) {
+ return new ApplicationMasterLauncher(this.appTokenSecretManager,
+ this.clientToAMSecretManager, getRMContext()) {
@Override
public void start() {
- //override to not start rpc handler
+ // override to not start rpc handler
}
+
@Override
- public void handle(AMLauncherEvent appEvent) {
- //don't do anything
+ public void handle(AMLauncherEvent appEvent) {
+ // don't do anything
}
+
@Override
public void stop() {
// don't do anything
@@ -203,31 +241,31 @@ public class MockRM extends ResourceMana
}
@Override
- protected AdminService createAdminService(
- ClientRMService clientRMService,
+ protected AdminService createAdminService(ClientRMService clientRMService,
ApplicationMasterService applicationMasterService,
ResourceTrackerService resourceTrackerService) {
- return new AdminService(
- getConfig(), scheduler, getRMContext(), this.nodesListManager,
- clientRMService, applicationMasterService, resourceTrackerService){
+ return new AdminService(getConfig(), scheduler, getRMContext(),
+ this.nodesListManager, clientRMService, applicationMasterService,
+ resourceTrackerService) {
@Override
public void start() {
- //override to not start rpc handler
+ // override to not start rpc handler
}
+
@Override
public void stop() {
// don't do anything
}
};
}
-
+
public NodesListManager getNodesListManager() {
return this.nodesListManager;
}
@Override
protected void startWepApp() {
- //override to disable webapp
+ // override to disable webapp
}
}