Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/12/14 00:05:59 UTC

svn commit: r1213975 [1/6] - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/ hadoop-mapreduce-client/hadoop-mapre...

Author: vinodkv
Date: Tue Dec 13 23:05:56 2011
New Revision: 1213975

URL: http://svn.apache.org/viewvc?rev=1213975&view=rev
Log:
MAPREDUCE-2863. Support web services for YARN and MR components. (Thomas Graves via vinodkv)
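
For context: the new resources are JAX-RS endpoints that serve JSON or XML from each daemon's existing web port under a /ws/v1/ prefix (for example /ws/v1/mapreduce on the MR application master, as declared in AMWebServices below). A minimal client sketch follows; the host, port and class name are placeholders for illustration and are not part of the commit:

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class AmWsClientSketch {
      public static void main(String[] args) throws Exception {
        // Placeholder base address; substitute the application master's
        // actual web address (for example the one shown on the RM UI).
        String base = args.length > 0 ? args[0] : "http://localhost:8088";
        URL url = new URL(base + "/ws/v1/mapreduce/jobs");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestMethod("GET");
        // Ask for JSON; the same resources are also annotated to produce XML.
        conn.setRequestProperty("Accept", "application/json");
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), "UTF-8"));
        for (String line; (line = in.readLine()) != null; ) {
          System.out.println(line);
        }
        in.close();
        conn.disconnect();
      }
    }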

Added:
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfEntryInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TasksInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/BadRequestException.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/NotFoundException.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/JAXBContextResolver.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/AppsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainerInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/ContainersInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/dao/NodeInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/JAXBContextResolver.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebServices.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/AppsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/CapacitySchedulerQueueInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ClusterInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/ClusterMetricsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/FifoSchedulerInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/NodeInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/NodesInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/SchedulerInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/SchedulerTypeInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/dao/UserMetricsInfo.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/TestRMWebServices.java
Modified:
    hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/WebServer.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/ResourceManager.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AboutBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsBlock.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/AppsList.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/CapacitySchedulerPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/DefaultSchedulerPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/MetricsOverviewTable.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/NodesPage.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RMWebApp.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/main/java/org/apache/hadoop/yarn/server/resourcemanager/webapp/RmController.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/MockRM.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-resourcemanager/src/test/java/org/apache/hadoop/yarn/server/resourcemanager/applicationsmanager/MockAsm.java
    hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/pom.xml
    hadoop/common/trunk/hadoop-mapreduce-project/pom.xml

Modified: hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/CHANGES.txt Tue Dec 13 23:05:56 2011
@@ -85,6 +85,9 @@ Release 0.23.1 - Unreleased
    
    MAPREDUCE-3121. NodeManager should handle disk-failures (Ravi Gummadi via mahadev)
 
+   MAPREDUCE-2863. Support web services for YARN and MR components. (Thomas
+   Graves via vinodkv)
+
   IMPROVEMENTS
 
     MAPREDUCE-3297. Moved log related components into yarn-common so that

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/MRClientService.java Tue Dec 13 23:05:56 2011
@@ -151,7 +151,7 @@ public class MRClientService extends Abs
             + ":" + server.getPort());
     LOG.info("Instantiated MRClientService at " + this.bindAddress);
     try {
-      webApp = WebApps.$for("mapreduce", AppContext.class, appContext).with(conf).
+      webApp = WebApps.$for("mapreduce", AppContext.class, appContext, "ws").with(conf).
           start(new AMWebApp());
     } catch (Exception e) {
       LOG.error("Webapps failed to start. Ignoring for now:", e);

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebApp.java Tue Dec 13 23:05:56 2011
@@ -18,8 +18,9 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import static org.apache.hadoop.yarn.util.StringHelper.*;
+import static org.apache.hadoop.yarn.util.StringHelper.pajoin;
 
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
 import org.apache.hadoop.yarn.webapp.WebApp;
 
 /**
@@ -29,6 +30,9 @@ public class AMWebApp extends WebApp imp
 
   @Override
   public void setup() {
+    bind(JAXBContextResolver.class);
+    bind(GenericExceptionHandler.class);
+    bind(AMWebServices.class);
     route("/", AppController.class);
     route("/app", AppController.class);
     route(pajoin("/job", JOB_ID), AppController.class, "job");

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AMWebServices.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,362 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import java.io.IOException;
+
+import javax.servlet.http.HttpServletRequest;
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response.Status;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+import org.apache.hadoop.yarn.webapp.NotFoundException;
+
+import com.google.inject.Inject;
+
+@Path("/ws/v1/mapreduce")
+public class AMWebServices {
+  private final AppContext appCtx;
+  private final App app;
+  private final Configuration conf;
+
+  @Inject
+  public AMWebServices(final App app, final AppContext context,
+      final Configuration conf) {
+    this.appCtx = context;
+    this.app = app;
+    this.conf = conf;
+  }
+
+  Boolean hasAccess(Job job, HttpServletRequest request) {
+    UserGroupInformation callerUgi = UserGroupInformation
+        .createRemoteUser(request.getRemoteUser());
+    if (!job.checkAccess(callerUgi, JobACL.VIEW_JOB)) {
+      return false;
+    }
+    return true;
+  }
+
+  /**
+   * check for job access.
+   *
+   * @param job
+   *          the job that is being accessed
+   */
+  void checkAccess(Job job, HttpServletRequest request) {
+    if (!hasAccess(job, request)) {
+      throw new WebApplicationException(Status.UNAUTHORIZED);
+    }
+  }
+
+  @GET
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppInfo get() {
+    return getAppInfo();
+  }
+
+  @GET
+  @Path("/info")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppInfo getAppInfo() {
+    return new AppInfo(this.app, this.app.context);
+  }
+
+  @GET
+  @Path("/jobs")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobsInfo getJobs(@Context HttpServletRequest hsr) {
+    JobsInfo allJobs = new JobsInfo();
+    for (Job job : appCtx.getAllJobs().values()) {
+      // getAllJobs only returns partial Job objects; look up the full Job
+      Job fullJob = appCtx.getJob(job.getID());
+      if (fullJob == null) {
+        continue;
+      }
+      allJobs.add(new JobInfo(fullJob, hasAccess(fullJob, hsr)));
+    }
+    return allJobs;
+  }
+
+  @GET
+  @Path("/jobs/{jobid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobInfo getJob(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    return new JobInfo(job, hasAccess(job, hsr));
+
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobCounterInfo getJobCounters(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    return new JobCounterInfo(this.appCtx, job);
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskCounterInfo getSingleTaskCounters(
+      @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
+      @PathParam("taskid") String tid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = this.appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    return new JobTaskCounterInfo(task);
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/conf")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ConfInfo getJobConf(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    ConfInfo info;
+    try {
+      info = new ConfInfo(job, this.conf);
+    } catch (IOException e) {
+      throw new NotFoundException("unable to load configuration for job: " + jid);
+    }
+    return info;
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TasksInfo getJobTasks(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid, @QueryParam("type") String type) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TasksInfo allTasks = new TasksInfo();
+    for (Task task : job.getTasks().values()) {
+      TaskType ttype = null;
+      if (type != null && !type.isEmpty()) {
+        try {
+          ttype = MRApps.taskType(type);
+        } catch (YarnException e) {
+          throw new BadRequestException("tasktype must be either m or r");
+        }
+      }
+      if (ttype != null && task.getType() != ttype) {
+        continue;
+      }
+      allTasks.add(new TaskInfo(task));
+    }
+    return allTasks;
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskInfo getJobTask(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    return new TaskInfo(task);
+
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}/attempts")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskAttemptsInfo getJobTaskAttempts(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+    TaskAttemptsInfo attempts = new TaskAttemptsInfo();
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    for (TaskAttempt ta : task.getAttempts().values()) {
+      if (ta != null) {
+        if (task.getType() == TaskType.REDUCE) {
+          attempts.add(new ReduceTaskAttemptInfo(ta, task.getType()));
+        } else {
+          attempts.add(new TaskAttemptInfo(ta, task.getType(), true));
+        }
+      }
+    }
+    return attempts;
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskAttemptInfo getJobTaskAttemptId(@Context HttpServletRequest hsr,
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid,
+      @PathParam("attemptid") String attId) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
+    if (attemptId == null) {
+      throw new NotFoundException("task attempt id " + attId
+          + " not found or invalid");
+    }
+    TaskAttempt ta = task.getAttempt(attemptId);
+    if (ta == null) {
+      throw new NotFoundException("Error getting info on task attempt id "
+          + attId);
+    }
+    if (task.getType() == TaskType.REDUCE) {
+      return new ReduceTaskAttemptInfo(ta, task.getType());
+    } else {
+      return new TaskAttemptInfo(ta, task.getType(), true);
+    }
+  }
+
+  @GET
+  @Path("/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
+      @Context HttpServletRequest hsr, @PathParam("jobid") String jid,
+      @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = this.appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    checkAccess(job, hsr);
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
+    if (attemptId == null) {
+      throw new NotFoundException("task attempt id " + attId
+          + " not found or invalid");
+    }
+    TaskAttempt ta = task.getAttempt(attemptId);
+    if (ta == null) {
+      throw new NotFoundException("Error getting info on task attempt id "
+          + attId);
+    }
+    return new JobTaskAttemptCounterInfo(ta);
+  }
+}

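Note on error handling: the resource methods above signal bad input with BadRequestException and missing entities with NotFoundException, relying on the GenericExceptionHandler added to hadoop-yarn-common (not reproduced in this part of the diff) to turn them into HTTP error responses. Purely as an illustration of that pattern, and not the committed implementation, a JAX-RS exception mapper for the not-found case could look like:

    import javax.ws.rs.core.MediaType;
    import javax.ws.rs.core.Response;
    import javax.ws.rs.ext.ExceptionMapper;
    import javax.ws.rs.ext.Provider;

    import org.apache.hadoop.yarn.webapp.NotFoundException;

    // Illustrative only: the real handler in this commit is
    // o.a.h.yarn.webapp.GenericExceptionHandler, which covers more
    // exception types and may format the response body differently.
    @Provider
    public class NotFoundMapperSketch
        implements ExceptionMapper<NotFoundException> {
      @Override
      public Response toResponse(NotFoundException e) {
        // Map the unchecked NotFoundException thrown by the resource
        // methods to an HTTP 404 with a small JSON body.
        return Response.status(Response.Status.NOT_FOUND)
            .entity("{\"message\":\"" + e.getMessage() + "\"}")
            .type(MediaType.APPLICATION_JSON)
            .build();
      }
    }
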
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/AppController.java Tue Dec 13 23:05:56 2011
@@ -32,6 +32,7 @@ import org.apache.hadoop.mapreduce.JobAC
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
@@ -72,13 +73,14 @@ public class AppController extends Contr
    * Render the /info page with an overview of current application.
    */
   public void info() {
+    AppInfo info = new AppInfo(app, app.context);
     info("Application Master Overview").
-      _("Application ID:", $(APP_ID)).
-      _("Application Name:", app.context.getApplicationName()).
-      _("User:", app.context.getUser()).
-      _("Started on:", Times.format(app.context.getStartTime())).
+      _("Application ID:", info.getId()).
+      _("Application Name:", info.getName()).
+      _("User:", info.getUser()).
+      _("Started on:", Times.format(info.getStartTime())).
       _("Elasped: ", org.apache.hadoop.util.StringUtils.formatTime(
-        Times.elapsed(app.context.getStartTime(), 0)));
+          info.getElapsedTime() ));
     render(InfoPage.class);
   }
 

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/ConfBlock.java Tue Dec 13 23:05:56 2011
@@ -22,14 +22,14 @@ import static org.apache.hadoop.mapreduc
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
 
 import java.io.IOException;
-import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileContext;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -71,11 +71,8 @@ public class ConfBlock extends HtmlBlock
     }
     Path confPath = job.getConfFile();
     try {
-      //Read in the configuration file and put it in a key/value table.
-      FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);
-      Configuration jobConf = new Configuration(false);
-      jobConf.addResource(fc.open(confPath));
-    
+      ConfInfo info = new ConfInfo(job, this.conf);
+
       html.div().h3(confPath.toString())._();
       TBODY<TABLE<Hamlet>> tbody = html.
         // Tasks table
@@ -87,10 +84,10 @@ public class ConfBlock extends HtmlBlock
           _().
         _().
       tbody();
-      for(Map.Entry<String, String> entry : jobConf) {
+      for (ConfEntryInfo entry : info.getProperties()) {
         tbody.
           tr().
-            td(entry.getKey()).
+            td(entry.getName()).
             td(entry.getValue()).
           _();
       }

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JAXBContextResolver.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,77 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.mapreduce.v2.app.webapp;
+
+import java.util.Set;
+import java.util.HashSet;
+import java.util.Arrays;
+
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.api.json.JSONJAXBContext;
+import com.google.inject.Singleton;
+
+import javax.ws.rs.ext.ContextResolver;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterGroupInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterGroupInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+
+@Singleton
+@Provider
+public class JAXBContextResolver implements ContextResolver<JAXBContext> {
+
+  private JAXBContext context;
+  private final Set<Class> types;
+    
+  // you have to specify all the dao classes here
+  private final Class[] cTypes = {AppInfo.class, CounterInfo.class,
+      JobTaskAttemptCounterInfo.class, JobTaskCounterInfo.class,
+      TaskCounterGroupInfo.class, ConfInfo.class, JobCounterInfo.class,
+      TaskCounterInfo.class, CounterGroupInfo.class, JobInfo.class, 
+      JobsInfo.class, ReduceTaskAttemptInfo.class, TaskAttemptInfo.class,
+      TaskInfo.class, TasksInfo.class, TaskAttemptsInfo.class,
+      ConfEntryInfo.class};
+    
+  public JAXBContextResolver() throws Exception {
+    this.types = new HashSet<Class>(Arrays.asList(cTypes));
+    this.context = new JSONJAXBContext(JSONConfiguration.natural().
+        rootUnwrapping(false).build(), cTypes);
+  }
+    
+  @Override
+  public JAXBContext getContext(Class<?> objectType) {
+    return (types.contains(objectType)) ? context : null;
+  }
+}

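The resolver above is what makes the DAO beans render as natural JSON. The self-contained sketch below (with a made-up Demo bean; nothing in it is part of the commit) shows the effect of JSONConfiguration.natural().rootUnwrapping(false) when marshalling one of these JAXB-annotated types:

    import java.io.StringWriter;

    import javax.xml.bind.annotation.XmlAccessType;
    import javax.xml.bind.annotation.XmlAccessorType;
    import javax.xml.bind.annotation.XmlRootElement;

    import com.sun.jersey.api.json.JSONConfiguration;
    import com.sun.jersey.api.json.JSONJAXBContext;
    import com.sun.jersey.api.json.JSONMarshaller;

    public class NaturalJsonDemo {

      // Hypothetical DAO-style bean, standing in for classes like AppInfo.
      @XmlRootElement(name = "demo")
      @XmlAccessorType(XmlAccessType.FIELD)
      public static class Demo {
        public String name = "example";
        public int count = 3;
      }

      public static void main(String[] args) throws Exception {
        JSONJAXBContext ctx = new JSONJAXBContext(
            JSONConfiguration.natural().rootUnwrapping(false).build(),
            Demo.class);
        JSONMarshaller m = ctx.createJSONMarshaller();
        StringWriter out = new StringWriter();
        m.marshallToJSON(new Demo(), out);
        // Prints something like: {"demo":{"name":"example","count":3}}
        System.out.println(out);
      }
    }
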
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobBlock.java Tue Dec 13 23:05:56 2011
@@ -18,47 +18,32 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.JOB_ID;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._EVEN;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._INFO_WRAP;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._ODD;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._TH;
+
 import java.util.Date;
-import java.util.Map;
 
 import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.app.job.Task;
-import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
 import org.apache.hadoop.util.StringUtils;
-import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 import org.apache.hadoop.yarn.webapp.view.InfoBlock;
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+
+import com.google.inject.Inject;
 
 public class JobBlock extends HtmlBlock {
   final AppContext appContext;
 
-  int runningMapTasks = 0;
-  int pendingMapTasks = 0;
-  int runningReduceTasks = 0;
-  int pendingReduceTasks = 0;
-
-  int newMapAttempts = 0;
-  int runningMapAttempts = 0;
-  int killedMapAttempts = 0;
-  int failedMapAttempts = 0;
-  int successfulMapAttempts = 0;
-  int newReduceAttempts = 0;
-  int runningReduceAttempts = 0;
-  int killedReduceAttempts = 0;
-  int failedReduceAttempts = 0;
-  int successfulReduceAttempts = 0;
-
   @Inject JobBlock(AppContext appctx) {
     appContext = appctx;
   }
@@ -77,23 +62,13 @@ public class JobBlock extends HtmlBlock 
         p()._("Sorry, ", jid, " not found.")._();
       return;
     }
-    JobReport jobReport = job.getReport();
-    String mapPct = percent(jobReport.getMapProgress());
-    String reducePct = percent(jobReport.getReduceProgress());
-    int mapTasks = job.getTotalMaps();
-    int mapTasksComplete = job.getCompletedMaps();
-    int reduceTasks = job.getTotalReduces();
-    int reducesTasksComplete = job.getCompletedReduces();
-    long startTime = jobReport.getStartTime();
-    long finishTime = jobReport.getFinishTime();
-    countTasksAndAttempts(job);
+    JobInfo jinfo = new JobInfo(job, true);
     info("Job Overview").
-        _("Job Name:", job.getName()).
-        _("State:", job.getState()).
-        _("Uberized:", job.isUber()).
-        _("Started:", new Date(startTime)).
-        _("Elapsed:", StringUtils.formatTime(
-            Times.elapsed(startTime, finishTime)));
+        _("Job Name:", jinfo.getName()).
+        _("State:", jinfo.getState()).
+        _("Uberized:", jinfo.isUberized()).
+        _("Started:", new Date(jinfo.getStartTime())).
+        _("Elapsed:", StringUtils.formatTime(jinfo.getElapsedTime()));
     html.
       _(InfoBlock.class).
       div(_INFO_WRAP).
@@ -112,25 +87,25 @@ public class JobBlock extends HtmlBlock 
               a(url("tasks", jid, "m"), "Map")._().
             td().
               div(_PROGRESSBAR).
-                $title(join(mapPct, '%')). // tooltip
+                $title(join(jinfo.getMapProgressPercent(), '%')). // tooltip
                 div(_PROGRESSBAR_VALUE).
-                  $style(join("width:", mapPct, '%'))._()._()._().
-            td(String.valueOf(mapTasks)).
-            td(String.valueOf(pendingMapTasks)).
-            td(String.valueOf(runningMapTasks)).
-            td(String.valueOf(mapTasksComplete))._().
+                  $style(join("width:", jinfo.getMapProgressPercent(), '%'))._()._()._().
+            td(String.valueOf(jinfo.getMapsTotal())).
+            td(String.valueOf(jinfo.getMapsPending())).
+            td(String.valueOf(jinfo.getMapsRunning())).
+            td(String.valueOf(jinfo.getMapsCompleted()))._().
           tr(_EVEN).
             th().
               a(url("tasks", jid, "r"), "Reduce")._().
             td().
               div(_PROGRESSBAR).
-                $title(join(reducePct, '%')). // tooltip
+                $title(join(jinfo.getReduceProgressPercent(), '%')). // tooltip
                 div(_PROGRESSBAR_VALUE).
-                  $style(join("width:", reducePct, '%'))._()._()._().
-            td(String.valueOf(reduceTasks)).
-            td(String.valueOf(pendingReduceTasks)).
-            td(String.valueOf(runningReduceTasks)).
-            td(String.valueOf(reducesTasksComplete))._()
+                  $style(join("width:", jinfo.getReduceProgressPercent(), '%'))._()._()._().
+            td(String.valueOf(jinfo.getReducesTotal())).
+            td(String.valueOf(jinfo.getReducesPending())).
+            td(String.valueOf(jinfo.getReducesRunning())).
+            td(String.valueOf(jinfo.getReducesCompleted()))._()
           ._().
 
         // Attempts table
@@ -145,110 +120,41 @@ public class JobBlock extends HtmlBlock 
         tr(_ODD).
           th("Maps").
           td().a(url("attempts", jid, "m",
-              TaskAttemptStateUI.NEW.toString()), 
-              String.valueOf(newMapAttempts))._().
+              TaskAttemptStateUI.NEW.toString()),
+              String.valueOf(jinfo.getNewMapAttempts()))._().
           td().a(url("attempts", jid, "m",
-              TaskAttemptStateUI.RUNNING.toString()), 
-              String.valueOf(runningMapAttempts))._().
+              TaskAttemptStateUI.RUNNING.toString()),
+              String.valueOf(jinfo.getRunningMapAttempts()))._().
           td().a(url("attempts", jid, "m",
-              TaskAttemptStateUI.FAILED.toString()), 
-              String.valueOf(failedMapAttempts))._().
+              TaskAttemptStateUI.FAILED.toString()),
+              String.valueOf(jinfo.getFailedMapAttempts()))._().
           td().a(url("attempts", jid, "m",
-              TaskAttemptStateUI.KILLED.toString()), 
-              String.valueOf(killedMapAttempts))._().
+              TaskAttemptStateUI.KILLED.toString()),
+              String.valueOf(jinfo.getKilledMapAttempts()))._().
           td().a(url("attempts", jid, "m",
-              TaskAttemptStateUI.SUCCESSFUL.toString()), 
-              String.valueOf(successfulMapAttempts))._().
+              TaskAttemptStateUI.SUCCESSFUL.toString()),
+              String.valueOf(jinfo.getSuccessfulMapAttempts()))._().
         _().
         tr(_EVEN).
           th("Reduces").
           td().a(url("attempts", jid, "r",
-              TaskAttemptStateUI.NEW.toString()), 
-              String.valueOf(newReduceAttempts))._().
+              TaskAttemptStateUI.NEW.toString()),
+              String.valueOf(jinfo.getNewReduceAttempts()))._().
           td().a(url("attempts", jid, "r",
-              TaskAttemptStateUI.RUNNING.toString()), 
-              String.valueOf(runningReduceAttempts))._().
+              TaskAttemptStateUI.RUNNING.toString()),
+              String.valueOf(jinfo.getRunningReduceAttempts()))._().
           td().a(url("attempts", jid, "r",
-              TaskAttemptStateUI.FAILED.toString()), 
-              String.valueOf(failedReduceAttempts))._().
+              TaskAttemptStateUI.FAILED.toString()),
+              String.valueOf(jinfo.getFailedReduceAttempts()))._().
           td().a(url("attempts", jid, "r",
-              TaskAttemptStateUI.KILLED.toString()), 
-              String.valueOf(killedReduceAttempts))._().
+              TaskAttemptStateUI.KILLED.toString()),
+              String.valueOf(jinfo.getKilledReduceAttempts()))._().
           td().a(url("attempts", jid, "r",
-              TaskAttemptStateUI.SUCCESSFUL.toString()), 
-              String.valueOf(successfulReduceAttempts))._().
+              TaskAttemptStateUI.SUCCESSFUL.toString()),
+              String.valueOf(jinfo.getSuccessfulReduceAttempts()))._().
          _().
        _().
      _();
   }
 
-  private void countTasksAndAttempts(Job job) {
-    Map<TaskId, Task> tasks = job.getTasks();
-    for (Task task : tasks.values()) {
-      switch (task.getType()) {
-      case MAP:
-        // Task counts
-        switch (task.getState()) {
-        case RUNNING:
-          ++runningMapTasks;
-          break;
-        case SCHEDULED:
-          ++pendingMapTasks;
-          break;
-        }
-        break;
-      case REDUCE:
-        // Task counts
-        switch (task.getState()) {
-        case RUNNING:
-          ++runningReduceTasks;
-          break;
-        case SCHEDULED:
-          ++pendingReduceTasks;
-          break;
-        }
-        break;
-      }
-
-      // Attempts counts
-      Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
-      for (TaskAttempt attempt : attempts.values()) {
-
-        int newAttempts = 0, running = 0, successful = 0, failed = 0, killed =0;
-
-        if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
-          ++newAttempts;
-        } else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt
-            .getState())) {
-          ++running;
-        } else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
-            .getState())) {
-          ++successful;
-        } else if (TaskAttemptStateUI.FAILED
-            .correspondsTo(attempt.getState())) {
-          ++failed;
-        } else if (TaskAttemptStateUI.KILLED
-            .correspondsTo(attempt.getState())) {
-          ++killed;
-        }
-
-        switch (task.getType()) {
-        case MAP:
-          newMapAttempts += newAttempts;
-          runningMapAttempts += running;
-          successfulMapAttempts += successful;
-          failedMapAttempts += failed;
-          killedMapAttempts += killed;
-          break;
-        case REDUCE:
-          newReduceAttempts += newAttempts;
-          runningReduceAttempts += running;
-          successfulReduceAttempts += successful;
-          failedReduceAttempts += failed;
-          killedReduceAttempts += killed;
-          break;
-        }
-      }
-    }
-  }
 }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/JobsBlock.java Tue Dec 13 23:05:56 2011
@@ -18,18 +18,19 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
 
-import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
 import org.apache.hadoop.mapreduce.v2.app.AppContext;
 import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobInfo;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+import com.google.inject.Inject;
 
 public class JobsBlock extends HtmlBlock {
   final AppContext appContext;
@@ -54,38 +55,31 @@ public class JobsBlock extends HtmlBlock
             th("Reduces Total").
             th("Reduces Completed")._()._().
         tbody();
-    for (Job job : appContext.getAllJobs().values()) {
-      String jobID = MRApps.toString(job.getID());
-      JobReport report = job.getReport();
-      String mapPct = percent(report.getMapProgress());
-      String mapsTotal = String.valueOf(job.getTotalMaps());
-      String mapsCompleted = String.valueOf(job.getCompletedMaps());
-      String reducePct = percent(report.getReduceProgress());
-      String reduceTotal = String.valueOf(job.getTotalReduces());
-      String reduceCompleted = String.valueOf(job.getCompletedReduces());
+    for (Job j : appContext.getAllJobs().values()) {
+      JobInfo job = new JobInfo(j, false);
       tbody.
         tr().
           td().
-            span().$title(String.valueOf(job.getID().getId()))._(). // for sorting
-            a(url("job", jobID), jobID)._().
-          td(job.getName().toString()).
-          td(job.getState().toString()).
+            span().$title(String.valueOf(job.getId()))._(). // for sorting
+            a(url("job", job.getId()), job.getId())._().
+          td(job.getName()).
+          td(job.getState()).
           td().
-            span().$title(mapPct)._(). // for sorting
+            span().$title(job.getMapProgressPercent())._(). // for sorting
             div(_PROGRESSBAR).
-              $title(join(mapPct, '%')). // tooltip
+              $title(join(job.getMapProgressPercent(), '%')). // tooltip
               div(_PROGRESSBAR_VALUE).
-                $style(join("width:", mapPct, '%'))._()._()._().
-          td(mapsTotal).
-          td(mapsCompleted).
+                $style(join("width:", job.getMapProgressPercent(), '%'))._()._()._().
+          td(String.valueOf(job.getMapsTotal())).
+          td(String.valueOf(job.getMapsCompleted())).
           td().
-            span().$title(reducePct)._(). // for sorting
+            span().$title(job.getReduceProgressPercent())._(). // for sorting
             div(_PROGRESSBAR).
-              $title(join(reducePct, '%')). // tooltip
+              $title(join(job.getReduceProgressPercent(), '%')). // tooltip
               div(_PROGRESSBAR_VALUE).
-                $style(join("width:", reducePct, '%'))._()._()._().
-          td(reduceTotal).
-          td(reduceCompleted)._();
+                $style(join("width:", job.getReduceProgressPercent(), '%'))._()._()._().
+          td(String.valueOf(job.getReducesTotal())).
+          td(String.valueOf(job.getReducesCompleted()))._();
     }
     tbody._()._();
   }

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TaskPage.java Tue Dec 13 23:05:56 2011
@@ -18,23 +18,29 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import java.util.Collection;
+import static org.apache.hadoop.yarn.util.StringHelper.percent;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.DATATABLES_ID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
-import com.google.common.base.Joiner;
-import com.google.inject.Inject;
+import java.util.Collection;
 
 import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
-import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.util.ConverterUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TD;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TR;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+
+import com.google.inject.Inject;
 
 public class TaskPage extends AppView {
 
@@ -66,24 +72,26 @@ public class TaskPage extends AppView {
             th(".tsh", "Elapsed").
             th(".note", "Note")._()._().
         tbody();
-      for (TaskAttempt ta : getTaskAttempts()) {
-        String taid = MRApps.toString(ta.getID());
-        String progress = percent(ta.getProgress());
-        ContainerId containerId = ta.getAssignedContainerID();
+      for (TaskAttempt attempt : getTaskAttempts()) {
+        TaskAttemptInfo ta = new TaskAttemptInfo(attempt, true);
+        String taid = ta.getId();
+        String progress = percent(ta.getProgress() / 100);
+        ContainerId containerId = ta.getAssignedContainerId();
 
-        String nodeHttpAddr = ta.getNodeHttpAddress();
-        long startTime = ta.getLaunchTime();
+        String nodeHttpAddr = ta.getNode();
+        long startTime = ta.getStartTime();
         long finishTime = ta.getFinishTime();
-        long elapsed = Times.elapsed(startTime, finishTime);
+        long elapsed = ta.getElapsedTime();
+        String diag = ta.getNote() == null ? "" : ta.getNote();
         TD<TR<TBODY<TABLE<Hamlet>>>> nodeTd = tbody.
           tr().
             td(".id", taid).
             td(".progress", progress).
-            td(".state", ta.getState().toString()).
+            td(".state", ta.getState()).
             td().
               a(".nodelink", url("http://", nodeHttpAddr), nodeHttpAddr);
         if (containerId != null) {
-          String containerIdStr = ConverterUtils.toString(containerId);
+          String containerIdStr = ta.getAssignedContainerIdStr();
           nodeTd._(" ").
             a(".logslink", url("http://", nodeHttpAddr, "node", "containerlogs",
               containerIdStr, app.getJob().getUserName()), "logs");
@@ -92,7 +100,7 @@ public class TaskPage extends AppView {
           td(".ts", Times.format(startTime)).
           td(".ts", Times.format(finishTime)).
           td(".dt", StringUtils.formatTime(elapsed)).
-          td(".note", Joiner.on('\n').join(ta.getDiagnostics()))._();
+          td(".note", diag)._();
       }
       tbody._()._();
     }
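
One detail worth noting in the hunk above: the DAO appears to hold progress already scaled to 0..100 (hence ta.getProgress() / 100), while StringHelper.percent() expects a 0..1 fraction. A tiny sketch of that conversion, assuming getProgress() returns a float in the 0..100 range:

  // Sketch only: TaskAttemptInfo.getProgress() is assumed to be 0..100;
  // percent() (o.a.h.yarn.util.StringHelper) formats a 0..1 fraction.
  float progressOutOf100 = ta.getProgress();
  String display = percent(progressOutOf100 / 100);   // same scaling as the hunk above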

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/TasksBlock.java Tue Dec 13 23:05:56 2011
@@ -18,21 +18,24 @@
 
 package org.apache.hadoop.mapreduce.v2.app.webapp;
 
-import com.google.inject.Inject;
+import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.percent;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR;
+import static org.apache.hadoop.yarn.webapp.view.JQueryUI._PROGRESSBAR_VALUE;
 
-import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
 import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
 import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
 import org.apache.hadoop.mapreduce.v2.util.MRApps;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.util.Times;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
-import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.*;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
+import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
 
-import static org.apache.hadoop.mapreduce.v2.app.webapp.AMWebApp.*;
-import static org.apache.hadoop.yarn.util.StringHelper.*;
-import static org.apache.hadoop.yarn.webapp.view.JQueryUI.*;
+import com.google.inject.Inject;
 
 public class TasksBlock extends HtmlBlock {
   final App app;
@@ -67,16 +70,16 @@ public class TasksBlock extends HtmlBloc
       if (type != null && task.getType() != type) {
         continue;
       }
-      String tid = MRApps.toString(task.getID());
-      TaskReport report = task.getReport();
-      String pct = percent(report.getProgress());
-      long startTime = report.getStartTime();
-      long finishTime = report.getFinishTime();
-      long elapsed = Times.elapsed(startTime, finishTime);
+      TaskInfo info = new TaskInfo(task);
+      String tid = info.getId();
+      String pct = percent(info.getProgress() / 100);
+      long startTime = info.getStartTime();
+      long finishTime = info.getFinishTime();
+      long elapsed = info.getElapsedTime();
       tbody.
         tr().
           td().
-            br().$title(String.valueOf(task.getID().getId()))._(). // sorting
+            br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
             a(url("task", tid), tid)._().
           td().
             br().$title(pct)._().
@@ -84,7 +87,7 @@ public class TasksBlock extends HtmlBloc
               $title(join(pct, '%')). // tooltip
               div(_PROGRESSBAR_VALUE).
                 $style(join("width:", pct, '%'))._()._()._().
-          td(report.getTaskState().toString()).
+          td(info.getState()).
           td().
             br().$title(String.valueOf(startTime))._().
             _(Times.format(startTime))._().

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/AppInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,70 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.yarn.util.Times;
+
+@XmlRootElement(name = "info")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AppInfo {
+
+  protected String appId;
+  protected String name;
+  protected String user;
+  protected String hostname;
+  protected long startedOn;
+  protected long elapsedTime;
+
+  public AppInfo() {
+  }
+
+  public AppInfo(App app, AppContext context) {
+    this.appId = context.getApplicationID().toString();
+    this.name = context.getApplicationName().toString();
+    this.user = context.getUser().toString();
+    this.startedOn = context.getStartTime();
+    this.elapsedTime = Times.elapsed(context.getStartTime(), 0);
+  }
+
+  public String getId() {
+    return this.appId;
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public String getUser() {
+    return this.user;
+  }
+
+  public long getStartTime() {
+    return this.startedOn;
+  }
+
+  public long getElapsedTime() {
+    return this.elapsedTime;
+  }
+
+}
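
The JAXB annotations on AppInfo (@XmlRootElement(name = "info") with field access) are what let the new web services layer serialize these DAOs directly. A minimal, standalone sketch of marshalling one to XML; the demo class is invented for illustration and the empty instance only exercises the no-arg constructor JAXB relies on:

  // Hypothetical demo class, not part of the commit.
  import javax.xml.bind.JAXBContext;
  import javax.xml.bind.Marshaller;

  import org.apache.hadoop.mapreduce.v2.app.webapp.dao.AppInfo;

  public class AppInfoXmlDemo {
    public static void main(String[] args) throws Exception {
      AppInfo info = new AppInfo();                      // no-arg constructor used by JAXB
      Marshaller m = JAXBContext.newInstance(AppInfo.class).createMarshaller();
      m.setProperty(Marshaller.JAXB_FORMATTED_OUTPUT, Boolean.TRUE);
      m.marshal(info, System.out);                       // prints an <info>...</info> document
    }
  }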

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfEntryInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfEntryInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfEntryInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfEntryInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ConfEntryInfo {
+
+  protected String name;
+  protected String value;
+
+  public ConfEntryInfo() {
+  }
+
+  public ConfEntryInfo(String key, String value) {
+    this.name = key;
+    this.value = value;
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public String getValue() {
+    return this.value;
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ConfInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,66 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.io.IOException;
+import java.util.ArrayList;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileContext;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ConfInfo {
+
+  protected String path;
+  protected ArrayList<ConfEntryInfo> property;
+
+  public ConfInfo() {
+  }
+
+  public ConfInfo(Job job, Configuration conf) throws IOException {
+
+    Path confPath = job.getConfFile();
+    this.property = new ArrayList<ConfEntryInfo>();
+    // Read in the configuration file and put it in a key/value table.
+    FileContext fc = FileContext.getFileContext(confPath.toUri(), conf);
+    Configuration jobConf = new Configuration(false);
+    jobConf.addResource(fc.open(confPath));
+    this.path = confPath.toString();
+    for (Map.Entry<String, String> entry : jobConf) {
+      this.property.add(new ConfEntryInfo(entry.getKey(), entry.getValue()));
+    }
+
+  }
+
+  public ArrayList<ConfEntryInfo> getProperties() {
+    return this.property;
+  }
+
+  public String getPath() {
+    return this.path;
+  }
+
+}
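
ConfInfo re-reads the job's configuration file from job.getConfFile() through FileContext and flattens it into name/value ConfEntryInfo pairs. A short usage sketch; "job" and "conf" are assumed to be the AM's Job and Configuration, as they would be wherever the conf page or conf web service builds this DAO:

  // Sketch only: build the DAO and list the job's effective configuration.
  ConfInfo confInfo = new ConfInfo(job, conf);   // throws IOException if the conf file cannot be read
  System.out.println("configuration read from: " + confInfo.getPath());
  for (ConfEntryInfo entry : confInfo.getProperties()) {
    System.out.println(entry.getName() + " = " + entry.getValue());
  }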

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterGroupInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,54 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlElement;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+
+@XmlRootElement(name = "counterGroup")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class CounterGroupInfo {
+
+  protected String counterGroupName;
+  @XmlElement(name = "counter")
+  protected ArrayList<CounterInfo> counter;
+
+  public CounterGroupInfo() {
+  }
+
+  public CounterGroupInfo(String name, CounterGroup g, CounterGroup mg,
+      CounterGroup rg) {
+    this.counterGroupName = name;
+    this.counter = new ArrayList<CounterInfo>();
+
+    for (Counter c : g.getAllCounters().values()) {
+      Counter mc = mg == null ? null : mg.getCounter(c.getName());
+      Counter rc = rg == null ? null : rg.getCounter(c.getName());
+      CounterInfo cinfo = new CounterInfo(c, mc, rc);
+      this.counter.add(cinfo);
+    }
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/CounterInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class CounterInfo {
+
+  protected String counterName;
+  protected long totalCounterValue;
+  protected long mapCounterValue;
+  protected long reduceCounterValue;
+
+  public CounterInfo() {
+  }
+
+  public CounterInfo(Counter counter, Counter mc, Counter rc) {
+    this.counterName = counter.getName();
+    this.totalCounterValue = counter.getValue();
+    this.mapCounterValue = mc == null ? 0 : mc.getValue();
+    this.reduceCounterValue = rc == null ? 0 : rc.getValue();
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobCounterInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,100 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.impl.JobImpl;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+
+@XmlRootElement(name = "jobCounters")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobCounterInfo {
+
+  @XmlTransient
+  protected Counters total = null;
+  @XmlTransient
+  protected Counters map = null;
+  @XmlTransient
+  protected Counters reduce = null;
+
+  protected String id;
+  protected ArrayList<CounterGroupInfo> counterGroups;
+
+  public JobCounterInfo() {
+  }
+
+  public JobCounterInfo(AppContext ctx, Job job) {
+    getCounters(ctx, job);
+    counterGroups = new ArrayList<CounterGroupInfo>();
+    this.id = MRApps.toString(job.getID());
+
+    int numGroups = 0;
+
+    if (total != null) {
+      for (CounterGroup g : total.getAllCounterGroups().values()) {
+        if (g != null) {
+          CounterGroup mg = map == null ? null : map.getCounterGroup(g
+              .getName());
+          CounterGroup rg = reduce == null ? null : reduce.getCounterGroup(g
+              .getName());
+          ++numGroups;
+
+          CounterGroupInfo cginfo = new CounterGroupInfo(g.getName(), g, mg, rg);
+          counterGroups.add(cginfo);
+        }
+      }
+    }
+  }
+
+  private void getCounters(AppContext ctx, Job job) {
+    total = JobImpl.newCounters();
+    if (job == null) {
+      return;
+    }
+    map = JobImpl.newCounters();
+    reduce = JobImpl.newCounters();
+    // Get all types of counters
+    Map<TaskId, Task> tasks = job.getTasks();
+    for (Task t : tasks.values()) {
+      Counters counters = t.getCounters();
+      JobImpl.incrAllCounters(total, counters);
+      switch (t.getType()) {
+      case MAP:
+        JobImpl.incrAllCounters(map, counters);
+        break;
+      case REDUCE:
+        JobImpl.incrAllCounters(reduce, counters);
+        break;
+      }
+    }
+  }
+
+}
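
JobCounterInfo ties the counter DAOs together: it rebuilds total, map-only and reduce-only Counters by walking every task with JobImpl.incrAllCounters(), then wraps each counter group in a CounterGroupInfo whose CounterInfo entries carry the total, map and reduce values side by side. A minimal usage sketch; "appContext" and "job" are assumed to come from the injected AM AppContext, as elsewhere in this web app:

  // Sketch only: aggregate a job's counters into the web-service DAO form.
  JobCounterInfo jobCounters = new JobCounterInfo(appContext, job);
  // With the JAXB annotations above, this serializes as a <jobCounters> element
  // holding the job id and, for each counter group, the per-counter
  // total/map/reduce values.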