Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/12/14 00:05:59 UTC

svn commit: r1213975 [3/6] - in /hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/ hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/ hadoop-mapreduce-client/hadoop-mapre...

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebServices.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,469 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import java.io.IOException;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.mapreduce.v2.api.records.JobId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.AppContext;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.YarnException;
+import org.apache.hadoop.yarn.webapp.BadRequestException;
+import org.apache.hadoop.yarn.webapp.NotFoundException;
+import org.apache.hadoop.yarn.webapp.WebApp;
+
+import com.google.inject.Inject;
+
+@Path("/ws/v1/history")
+public class HsWebServices {
+  private final AppContext appCtx;
+  private WebApp webapp;
+  private final Configuration conf;
+
+  @Context
+  UriInfo uriInfo;
+
+  @Inject
+  public HsWebServices(final AppContext appCtx, final Configuration conf,
+      final WebApp webapp) {
+    this.appCtx = appCtx;
+    this.conf = conf;
+    this.webapp = webapp;
+  }
+
+  @GET
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public HistoryInfo get() {
+    return getHistoryInfo();
+  }
+
+  @GET
+  @Path("/info")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public HistoryInfo getHistoryInfo() {
+    return new HistoryInfo();
+  }
+
+  @GET
+  @Path("/mapreduce/jobs")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobsInfo getJobs(@QueryParam("user") String userQuery,
+      @QueryParam("limit") String count,
+      @QueryParam("queue") String queueQuery,
+      @QueryParam("startedTimeBegin") String startedBegin,
+      @QueryParam("startedTimeEnd") String startedEnd,
+      @QueryParam("finishedTimeBegin") String finishBegin,
+      @QueryParam("finishedTimeEnd") String finishEnd) {
+    JobsInfo allJobs = new JobsInfo();
+    long num = 0;
+    boolean checkCount = false;
+    boolean checkStart = false;
+    boolean checkEnd = false;
+    long countNum = 0;
+
+    // defaults applied when the begin/end parameters are not specified
+    long sBegin = 0;
+    long sEnd = Long.MAX_VALUE;
+    long fBegin = 0;
+    long fEnd = Long.MAX_VALUE;
+
+    if (count != null && !count.isEmpty()) {
+      checkCount = true;
+      try {
+        countNum = Long.parseLong(count);
+      } catch (NumberFormatException e) {
+        throw new BadRequestException(e.getMessage());
+      }
+      if (countNum <= 0) {
+        throw new BadRequestException("limit value must be greater then 0");
+      }
+    }
+
+    if (startedBegin != null && !startedBegin.isEmpty()) {
+      checkStart = true;
+      try {
+        sBegin = Long.parseLong(startedBegin);
+      } catch (NumberFormatException e) {
+        throw new BadRequestException(e.getMessage());
+      }
+      if (sBegin < 0) {
+        throw new BadRequestException("startedTimeBegin must be greater than 0");
+      }
+    }
+    if (startedEnd != null && !startedEnd.isEmpty()) {
+      checkStart = true;
+      try {
+        sEnd = Long.parseLong(startedEnd);
+      } catch (NumberFormatException e) {
+        throw new BadRequestException(e.getMessage());
+      }
+      if (sEnd < 0) {
+        throw new BadRequestException("startedTimeEnd must be greater than 0");
+      }
+    }
+    if (sBegin > sEnd) {
+      throw new BadRequestException(
+          "startedTimeEnd must be greater than startTimeBegin");
+    }
+
+    if (finishBegin != null && !finishBegin.isEmpty()) {
+      checkEnd = true;
+      try {
+        fBegin = Long.parseLong(finishBegin);
+      } catch (NumberFormatException e) {
+        throw new BadRequestException(e.getMessage());
+      }
+      if (fBegin < 0) {
+        throw new BadRequestException("finishTimeBegin must be greater than 0");
+      }
+    }
+    if (finishEnd != null && !finishEnd.isEmpty()) {
+      checkEnd = true;
+      try {
+        fEnd = Long.parseLong(finishEnd);
+      } catch (NumberFormatException e) {
+        throw new BadRequestException(e.getMessage());
+      }
+      if (fEnd < 0) {
+        throw new BadRequestException("finishTimeEnd must be greater than 0");
+      }
+    }
+    if (fBegin > fEnd) {
+      throw new BadRequestException(
+          "finishTimeEnd must be greater than finishTimeBegin");
+    }
+
+    for (Job job : appCtx.getAllJobs().values()) {
+      if (checkCount && num == countNum) {
+        break;
+      }
+
+      // getAllJobs only returns a partial Job; fetch the full one
+      Job fullJob = appCtx.getJob(job.getID());
+      if (fullJob == null) {
+        continue;
+      }
+
+      JobInfo jobInfo = new JobInfo(fullJob);
+      // can't really validate that the queue is a valid one since queues could change
+      if (queueQuery != null && !queueQuery.isEmpty()) {
+        if (!jobInfo.getQueueName().equals(queueQuery)) {
+          continue;
+        }
+      }
+
+      if (userQuery != null && !userQuery.isEmpty()) {
+        if (!jobInfo.getUserName().equals(userQuery)) {
+          continue;
+        }
+      }
+
+      if (checkStart
+          && (jobInfo.getStartTime() < sBegin || jobInfo.getStartTime() > sEnd)) {
+        continue;
+      }
+      if (checkEnd
+          && (jobInfo.getFinishTime() < fBegin || jobInfo.getFinishTime() > fEnd)) {
+        continue;
+      }
+
+      allJobs.add(jobInfo);
+      num++;
+    }
+    return allJobs;
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobInfo getJob(@PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    return new JobInfo(job);
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/attempts")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AMAttemptsInfo getJobAttempts(@PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    AMAttemptsInfo amAttempts = new AMAttemptsInfo();
+    for (AMInfo amInfo : job.getAMInfos()) {
+      AMAttemptInfo attempt = new AMAttemptInfo(amInfo, MRApps.toString(job
+          .getID()), job.getUserName(), uriInfo.getBaseUri().toString(),
+          webapp.name());
+      amAttempts.add(attempt);
+    }
+    return amAttempts;
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobCounterInfo getJobCounters(@PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    return new JobCounterInfo(this.appCtx, job);
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskCounterInfo getSingleTaskCounters(
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = this.appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    return new JobTaskCounterInfo(task);
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/conf")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ConfInfo getJobConf(@PathParam("jobid") String jid) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    ConfInfo info;
+    try {
+      info = new ConfInfo(job, this.conf);
+    } catch (IOException e) {
+      throw new NotFoundException("unable to load configuration for job: "
+          + jid);
+    }
+
+    return info;
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TasksInfo getJobTasks(@PathParam("jobid") String jid,
+      @QueryParam("type") String type) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    // parse and validate the requested task type once, outside the loop
+    TaskType ttype = null;
+    if (type != null && !type.isEmpty()) {
+      try {
+        ttype = MRApps.taskType(type);
+      } catch (YarnException e) {
+        throw new BadRequestException("tasktype must be either m or r");
+      }
+    }
+    TasksInfo allTasks = new TasksInfo();
+    for (Task task : job.getTasks().values()) {
+      if (ttype != null && task.getType() != ttype) {
+        continue;
+      }
+      allTasks.add(new TaskInfo(task));
+    }
+    return allTasks;
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskInfo getJobTask(@PathParam("jobid") String jid,
+      @PathParam("taskid") String tid) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    return new TaskInfo(task);
+
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskAttemptsInfo getJobTaskAttempts(@PathParam("jobid") String jid,
+      @PathParam("taskid") String tid) {
+    TaskAttemptsInfo attempts = new TaskAttemptsInfo();
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    for (TaskAttempt ta : task.getAttempts().values()) {
+      if (ta != null) {
+        if (task.getType() == TaskType.REDUCE) {
+          attempts.add(new ReduceTaskAttemptInfo(ta, task.getType()));
+        } else {
+          attempts.add(new TaskAttemptInfo(ta, task.getType(), false));
+        }
+      }
+    }
+    return attempts;
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public TaskAttemptInfo getJobTaskAttemptId(@PathParam("jobid") String jid,
+      @PathParam("taskid") String tid, @PathParam("attemptid") String attId) {
+    Job job = this.appCtx.getJob(MRApps.toJobID(jid));
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
+    if (attemptId == null) {
+      throw new NotFoundException("task attempt id " + attId
+          + " not found or invalid");
+    }
+    TaskAttempt ta = task.getAttempt(attemptId);
+    if (ta == null) {
+      throw new NotFoundException("Error getting info on task attempt id "
+          + attId);
+    }
+    if (task.getType() == TaskType.REDUCE) {
+      return new ReduceTaskAttemptInfo(ta, task.getType());
+    } else {
+      return new TaskAttemptInfo(ta, task.getType(), false);
+    }
+  }
+
+  @GET
+  @Path("/mapreduce/jobs/{jobid}/tasks/{taskid}/attempts/{attemptid}/counters")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public JobTaskAttemptCounterInfo getJobTaskAttemptIdCounters(
+      @PathParam("jobid") String jid, @PathParam("taskid") String tid,
+      @PathParam("attemptid") String attId) {
+    JobId jobId = MRApps.toJobID(jid);
+    if (jobId == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    Job job = this.appCtx.getJob(jobId);
+    if (job == null) {
+      throw new NotFoundException("job, " + jid + ", is not found");
+    }
+    TaskId taskID = MRApps.toTaskID(tid);
+    if (taskID == null) {
+      throw new NotFoundException("taskid " + tid + " not found or invalid");
+    }
+    Task task = job.getTask(taskID);
+    if (task == null) {
+      throw new NotFoundException("task not found with id " + tid);
+    }
+    TaskAttemptId attemptId = MRApps.toTaskAttemptID(attId);
+    if (attemptId == null) {
+      throw new NotFoundException("task attempt id " + attId
+          + " not found or invalid");
+    }
+    TaskAttempt ta = task.getAttempt(attemptId);
+    if (ta == null) {
+      throw new NotFoundException("Error getting info on task attempt id "
+          + attId);
+    }
+    return new JobTaskAttemptCounterInfo(ta);
+  }
+
+}
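
A minimal sketch of exercising the new endpoints, assuming a
JobHistoryServer whose web app listens on the usual 19888 port (the
host, port, user, and class name below are illustrative placeholders):

    import java.io.BufferedReader;
    import java.io.InputStreamReader;
    import java.net.HttpURLConnection;
    import java.net.URL;

    public class HsJobsClient {
      public static void main(String[] args) throws Exception {
        // List up to 10 jobs run by one user, requesting a JSON response.
        URL url = new URL("http://localhost:19888/ws/v1/history"
            + "/mapreduce/jobs?user=hadoop&limit=10");
        HttpURLConnection conn = (HttpURLConnection) url.openConnection();
        conn.setRequestProperty("Accept", "application/json");
        BufferedReader in = new BufferedReader(
            new InputStreamReader(conn.getInputStream(), "UTF-8"));
        for (String line; (line = in.readLine()) != null;) {
          System.out.println(line);
        }
        in.close();
      }
    }

The same URLs answer with XML when the Accept header asks for
application/xml, per the @Produces annotations above.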

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/JAXBContextResolver.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,78 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp;
+
+import com.google.inject.Singleton;
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.api.json.JSONJAXBContext;
+
+import java.util.Arrays;
+import java.util.HashSet;
+import java.util.Set;
+
+import javax.ws.rs.ext.ContextResolver;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterGroupInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.CounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskAttemptCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.JobTaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterGroupInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskCounterInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TasksInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptsInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobsInfo;
+
+@Singleton
+@Provider
+public class JAXBContextResolver implements ContextResolver<JAXBContext> {
+
+  private JAXBContext context;
+  private final Set<Class> types;
+
+  // every dao class returned by the web services must be listed here
+  private final Class[] cTypes = { HistoryInfo.class, JobInfo.class,
+      JobsInfo.class, TasksInfo.class, TaskInfo.class, TaskAttemptsInfo.class,
+      ConfInfo.class, CounterInfo.class, JobTaskCounterInfo.class,
+      JobTaskAttemptCounterInfo.class, TaskCounterInfo.class,
+      JobCounterInfo.class, ReduceTaskAttemptInfo.class,
+      TaskAttemptInfo.class, CounterGroupInfo.class,
+      TaskCounterGroupInfo.class, AMAttemptInfo.class, AMAttemptsInfo.class };
+
+  public JAXBContextResolver() throws Exception {
+    this.types = new HashSet<Class>(Arrays.asList(cTypes));
+    this.context = new JSONJAXBContext(JSONConfiguration.natural()
+        .rootUnwrapping(false).build(), cTypes);
+  }
+
+  @Override
+  public JAXBContext getContext(Class<?> objectType) {
+    return (types.contains(objectType)) ? context : null;
+  }
+}
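
The resolver pins Jersey's natural JSON convention with
rootUnwrapping(false), which keeps the root element ("job", "jobs",
"historyInfo", ...) in every response. A standalone sketch of the same
configuration marshalling one of the registered DAO types (Jersey 1.x
API; the demo class name is made up):

    import com.sun.jersey.api.json.JSONConfiguration;
    import com.sun.jersey.api.json.JSONJAXBContext;
    import com.sun.jersey.api.json.JSONMarshaller;

    import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;

    public class JsonContextDemo {
      public static void main(String[] args) throws Exception {
        // Same settings the resolver uses: natural notation, keep the root.
        JSONJAXBContext ctx = new JSONJAXBContext(
            JSONConfiguration.natural().rootUnwrapping(false).build(),
            HistoryInfo.class);
        JSONMarshaller marshaller = ctx.createJSONMarshaller();
        // Prints something like {"historyInfo":{"hadoopVersion":"...",...}}
        marshaller.marshallToJSON(new HistoryInfo(), System.out);
      }
    }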

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,96 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.hs.webapp.dao;
+
+import static org.apache.hadoop.yarn.util.StringHelper.join;
+import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.AMInfo;
+import org.apache.hadoop.yarn.api.records.NodeId;
+import org.apache.hadoop.yarn.util.BuilderUtils;
+
+@XmlRootElement(name = "amAttempt")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AMAttemptInfo {
+
+  protected String nodeHttpAddress;
+  protected String nodeId;
+  protected int id;
+  protected long startTime;
+  protected String containerId;
+  protected String logsLink;
+
+  @XmlTransient
+  protected String shortLogsLink;
+
+  public AMAttemptInfo() {
+  }
+
+  public AMAttemptInfo(AMInfo amInfo, String jobId, String user, String host,
+      String pathPrefix) {
+    this.nodeHttpAddress = amInfo.getNodeManagerHost() + ":"
+        + amInfo.getNodeManagerHttpPort();
+    NodeId nodeId = BuilderUtils.newNodeId(amInfo.getNodeManagerHost(),
+        amInfo.getNodeManagerPort());
+    this.nodeId = nodeId.toString();
+    this.id = amInfo.getAppAttemptId().getAttemptId();
+    this.startTime = amInfo.getStartTime();
+    this.containerId = amInfo.getContainerId().toString();
+    this.logsLink = join(
+        host,
+        pathPrefix,
+        ujoin("logs", nodeId.toString(), amInfo.getContainerId().toString(),
+            jobId, user));
+    this.shortLogsLink = ujoin("logs", nodeId.toString(), amInfo
+        .getContainerId().toString(), jobId, user);
+  }
+
+  public String getNodeHttpAddress() {
+    return this.nodeHttpAddress;
+  }
+
+  public String getNodeId() {
+    return this.nodeId;
+  }
+
+  public int getAttemptId() {
+    return this.id;
+  }
+
+  public long getStartTime() {
+    return this.startTime;
+  }
+
+  public String getContainerId() {
+    return this.containerId;
+  }
+
+  public String getLogsLink() {
+    return this.logsLink;
+  }
+
+  public String getShortLogsLink() {
+    return this.shortLogsLink;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/AMAttemptsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.hs.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "attempts")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class AMAttemptsInfo {
+
+  protected ArrayList<AMAttemptInfo> attempt = new ArrayList<AMAttemptInfo>();
+
+  public AMAttemptsInfo() {
+  } // JAXB needs this
+
+  public void add(AMAttemptInfo info) {
+    this.attempt.add(info);
+  }
+
+  public ArrayList<AMAttemptInfo> getAttempts() {
+    return this.attempt;
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/HistoryInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,53 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.hs.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.util.VersionInfo;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class HistoryInfo {
+
+  protected String hadoopVersion;
+  protected String hadoopBuildVersion;
+  protected String hadoopVersionBuiltOn;
+
+  public HistoryInfo() {
+    this.hadoopVersion = VersionInfo.getVersion();
+    this.hadoopBuildVersion = VersionInfo.getBuildVersion();
+    this.hadoopVersionBuiltOn = VersionInfo.getDate();
+  }
+
+  public String getHadoopVersion() {
+    return this.hadoopVersion;
+  }
+
+  public String getHadoopBuildVersion() {
+    return this.hadoopBuildVersion;
+  }
+
+  public String getHadoopVersionBuiltOn() {
+    return this.hadoopVersionBuiltOn;
+  }
+
+}
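
With the resolver registered above, a GET on /ws/v1/history/info should
therefore return a body shaped like this (version values are
illustrative placeholders):

    {"historyInfo": {
       "hadoopVersion": "0.23.1-SNAPSHOT",
       "hadoopBuildVersion": "0.23.1-SNAPSHOT from <revision> by <user>",
       "hadoopVersionBuiltOn": "Tue Dec 13 23:05:56 UTC 2011"
    }}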

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,295 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.hs.webapp.dao;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
+import org.apache.hadoop.mapreduce.v2.hs.CompletedJob;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.security.authorize.AccessControlList;
+
+@XmlRootElement(name = "job")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobInfo {
+
+  protected long startTime;
+  protected long finishTime;
+  protected String id;
+  protected String name;
+  protected String queue;
+  protected String user;
+  protected String state;
+  protected int mapsTotal;
+  protected int mapsCompleted;
+  protected int reducesTotal;
+  protected int reducesCompleted;
+  protected boolean uberized;
+  protected String diagnostics;
+  protected long avgMapTime = 0;
+  protected long avgReduceTime = 0;
+  protected long avgShuffleTime = 0;
+  protected long avgMergeTime = 0;
+  protected int failedReduceAttempts = 0;
+  protected int killedReduceAttempts = 0;
+  protected int successfulReduceAttempts = 0;
+  protected int failedMapAttempts = 0;
+  protected int killedMapAttempts = 0;
+  protected int successfulMapAttempts = 0;
+  protected ArrayList<ConfEntryInfo> acls;
+
+  @XmlTransient
+  protected int numMaps;
+  @XmlTransient
+  protected int numReduces;
+
+  public JobInfo() {
+  }
+
+  public JobInfo(Job job) {
+    this.id = MRApps.toString(job.getID());
+    JobReport report = job.getReport();
+    countTasksAndAttempts(job);
+    this.mapsTotal = job.getTotalMaps();
+    this.mapsCompleted = job.getCompletedMaps();
+    this.reducesTotal = job.getTotalReduces();
+    this.reducesCompleted = job.getCompletedReduces();
+    this.startTime = report.getStartTime();
+    this.finishTime = report.getFinishTime();
+    this.name = job.getName().toString();
+    this.queue = job.getQueueName();
+    this.user = job.getUserName();
+    this.state = job.getState().toString();
+    this.uberized = job.isUber();
+    List<String> diagnostics = job.getDiagnostics();
+    if (diagnostics != null && !diagnostics.isEmpty()) {
+      StringBuffer b = new StringBuffer();
+      for (String diag : diagnostics) {
+        b.append(diag);
+      }
+      this.diagnostics = b.toString();
+    }
+
+    this.acls = new ArrayList<ConfEntryInfo>();
+    if (job instanceof CompletedJob) {
+      Map<JobACL, AccessControlList> allacls = job.getJobACLs();
+      if (allacls != null) {
+        for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
+          this.acls.add(new ConfEntryInfo(entry.getKey().getAclName(), entry
+              .getValue().getAclString()));
+        }
+      }
+    }
+  }
+
+  public long getNumMaps() {
+    return numMaps;
+  }
+
+  public long getNumReduces() {
+    return numReduces;
+  }
+
+  public long getAvgMapTime() {
+    return avgMapTime;
+  }
+
+  public long getAvgReduceTime() {
+    return avgReduceTime;
+  }
+
+  public long getAvgShuffleTime() {
+    return avgShuffleTime;
+  }
+
+  public long getAvgMergeTime() {
+    return avgMergeTime;
+  }
+
+  public long getFailedReduceAttempts() {
+    return failedReduceAttempts;
+  }
+
+  public long getKilledReduceAttempts() {
+    return killedReduceAttempts;
+  }
+
+  public long getSuccessfulReduceAttempts() {
+    return successfulReduceAttempts;
+  }
+
+  public long getFailedMapAttempts() {
+    return failedMapAttempts;
+  }
+
+  public long getKilledMapAttempts() {
+    return killedMapAttempts;
+  }
+
+  public long getSuccessfulMapAttempts() {
+    return successfulMapAttempts;
+  }
+
+  public ArrayList<ConfEntryInfo> getAcls() {
+    return acls;
+  }
+
+  public int getReducesCompleted() {
+    return this.reducesCompleted;
+  }
+
+  public int getReducesTotal() {
+    return this.reducesTotal;
+  }
+
+  public int getMapsCompleted() {
+    return this.mapsCompleted;
+  }
+
+  public int getMapsTotal() {
+    return this.mapsTotal;
+  }
+
+  public String getState() {
+    return this.state;
+  }
+
+  public String getUserName() {
+    return this.user;
+  }
+
+  public String getName() {
+    return this.name;
+  }
+
+  public String getQueueName() {
+    return this.queue;
+  }
+
+  public String getId() {
+    return this.id;
+  }
+
+  public long getStartTime() {
+    return this.startTime;
+  }
+
+  public long getFinishTime() {
+    return this.finishTime;
+  }
+
+  public boolean isUber() {
+    return this.uberized;
+  }
+
+  public String getDiagnostics() {
+    return this.diagnostics;
+  }
+
+  /**
+   * Go through a job's tasks and attempts, updating the member variables
+   * with the counts and timing totals that are reported on the page.
+   *
+   * @param job
+   *          the job to get counts for.
+   */
+  private void countTasksAndAttempts(Job job) {
+    numReduces = 0;
+    numMaps = 0;
+    final Map<TaskId, Task> tasks = job.getTasks();
+    if (tasks == null) {
+      return;
+    }
+    for (Task task : tasks.values()) {
+      // Attempts counts
+      Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
+      for (TaskAttempt attempt : attempts.values()) {
+        int successful = 0;
+        int failed = 0;
+        int killed = 0;
+        if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
+          // Do Nothing
+        } else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt.getState())) {
+          // Do Nothing
+        } else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
+            .getState())) {
+          ++successful;
+        } else if (TaskAttemptStateUI.FAILED.correspondsTo(attempt.getState())) {
+          ++failed;
+        } else if (TaskAttemptStateUI.KILLED.correspondsTo(attempt.getState())) {
+          ++killed;
+        }
+
+        switch (task.getType()) {
+        case MAP:
+          successfulMapAttempts += successful;
+          failedMapAttempts += failed;
+          killedMapAttempts += killed;
+          if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
+            numMaps++;
+            avgMapTime += (attempt.getFinishTime() - attempt.getLaunchTime());
+          }
+          break;
+        case REDUCE:
+          successfulReduceAttempts += successful;
+          failedReduceAttempts += failed;
+          killedReduceAttempts += killed;
+          if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
+            numReduces++;
+            avgShuffleTime += (attempt.getShuffleFinishTime() - attempt
+                .getLaunchTime());
+            avgMergeTime += attempt.getSortFinishTime()
+                - attempt.getLaunchTime();
+            avgReduceTime += (attempt.getFinishTime() - attempt
+                .getShuffleFinishTime());
+          }
+          break;
+        }
+      }
+    }
+
+    if (numMaps > 0) {
+      avgMapTime = avgMapTime / numMaps;
+    }
+
+    if (numReduces > 0) {
+      avgReduceTime = avgReduceTime / numReduces;
+      avgShuffleTime = avgShuffleTime / numReduces;
+      avgMergeTime = avgMergeTime / numReduces;
+    }
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/dao/JobsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.hs.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "jobs")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobsInfo {
+
+  protected ArrayList<JobInfo> job = new ArrayList<JobInfo>();
+
+  public JobsInfo() {
+  } // JAXB needs this
+
+  public void add(JobInfo jobInfo) {
+    this.job.add(jobInfo);
+  }
+
+  public ArrayList<JobInfo> getJobs() {
+    return this.job;
+  }
+
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/pom.xml Tue Dec 13 23:05:56 2011
@@ -238,7 +238,7 @@
     <dependency>
       <groupId>com.google.inject.extensions</groupId>
       <artifactId>guice-servlet</artifactId>
-      <version>2.0</version>
+      <version>3.0</version>
     </dependency>
     <dependency>
       <groupId>junit</groupId>

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/BadRequestException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/BadRequestException.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/BadRequestException.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/BadRequestException.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,39 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.webapp;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response.Status;
+
+public class BadRequestException extends WebApplicationException {
+
+  private static final long serialVersionUID = 1L;
+
+  public BadRequestException() {
+    super(Status.BAD_REQUEST);
+  }
+
+  public BadRequestException(java.lang.Throwable cause) {
+    super(cause, Status.BAD_REQUEST);
+  }
+
+  public BadRequestException(String msg) {
+    super(new Exception(msg), Status.BAD_REQUEST);
+  }
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/DefaultWrapperServlet.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,50 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.webapp;
+
+import java.io.IOException;
+
+import javax.servlet.RequestDispatcher;
+import javax.servlet.ServletException;
+import javax.servlet.http.HttpServlet;
+import javax.servlet.http.HttpServletRequest;
+import javax.servlet.http.HttpServletRequestWrapper;
+import javax.servlet.http.HttpServletResponse;
+
+import com.google.inject.Singleton;
+
+@Singleton
+public class DefaultWrapperServlet extends HttpServlet {
+
+  private static final long serialVersionUID = 1L;
+
+  @Override
+  public void doGet(HttpServletRequest req, HttpServletResponse resp)
+      throws ServletException, IOException {
+    RequestDispatcher rd = getServletContext().getNamedDispatcher("default");
+
+    // Wrap the request so the "default" servlet sees an empty servlet path
+    // and resolves the resource against the context root.
+    HttpServletRequest wrapped = new HttpServletRequestWrapper(req) {
+      @Override
+      public String getServletPath() {
+        return "";
+      }
+    };
+
+    rd.forward(wrapped, resp);
+  }
+
+}

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/GenericExceptionHandler.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,115 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.yarn.webapp;
+
+import java.io.FileNotFoundException;
+import java.io.IOException;
+import java.util.Map;
+import java.util.TreeMap;
+
+import javax.servlet.http.HttpServletResponse;
+import javax.ws.rs.core.Context;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.Response;
+import javax.ws.rs.ext.ExceptionMapper;
+import javax.ws.rs.ext.Provider;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.security.authorize.AuthorizationException;
+import org.mortbay.util.ajax.JSON;
+
+import com.google.inject.Singleton;
+
+/**
+ * Handle web services Jersey exceptions and create a JSON response in the format:
+ * { "RemoteException" :
+ *   {
+ *     "exception" : <exception type>,
+ *     "javaClassName" : <classname of exception>,
+ *     "message" : <error message from exception>
+ *   }
+ * }
+ */
+@Singleton
+@Provider
+public class GenericExceptionHandler implements ExceptionMapper<Exception> {
+  public static final Log LOG = LogFactory
+      .getLog(GenericExceptionHandler.class);
+
+  private @Context HttpServletResponse response;
+
+  @Override
+  public Response toResponse(Exception e) {
+    if (LOG.isTraceEnabled()) {
+      LOG.trace("GOT EXCEPITION", e);
+    }
+    // Let Jersey's own NotFoundException pass through untouched; otherwise
+    // filter forward on 404 (ServletContainer.FEATURE_FILTER_FORWARD_ON_404)
+    // won't work and the web UI breaks.
+    if (e instanceof com.sun.jersey.api.NotFoundException) {
+      return ((com.sun.jersey.api.NotFoundException) e).getResponse();
+    }
+    // clear content type
+    response.setContentType(null);
+
+    // Convert exception
+    if (e instanceof RemoteException) {
+      e = ((RemoteException) e).unwrapRemoteException();
+    }
+
+    // Map response status
+    final Response.Status s;
+    if (e instanceof SecurityException) {
+      s = Response.Status.UNAUTHORIZED;
+    } else if (e instanceof AuthorizationException) {
+      s = Response.Status.UNAUTHORIZED;
+    } else if (e instanceof FileNotFoundException) {
+      s = Response.Status.NOT_FOUND;
+    } else if (e instanceof NotFoundException) {
+      s = Response.Status.NOT_FOUND;
+    } else if (e instanceof IOException) {
+      s = Response.Status.NOT_FOUND;
+    } else if (e instanceof UnsupportedOperationException) {
+      s = Response.Status.BAD_REQUEST;
+    } else if (e instanceof IllegalArgumentException) {
+      s = Response.Status.BAD_REQUEST;
+    } else if (e instanceof NumberFormatException) {
+      s = Response.Status.BAD_REQUEST;
+    } else if (e instanceof BadRequestException) {
+      s = Response.Status.BAD_REQUEST;
+    } else {
+      LOG.warn("INTERNAL_SERVER_ERROR", e);
+      s = Response.Status.INTERNAL_SERVER_ERROR;
+    }
+
+    // convert to json
+    final Map<String, Object> m = new TreeMap<String, Object>();
+    m.put("exception", e.getClass().getSimpleName());
+    m.put("message", e.getMessage());
+    m.put("javaClassName", e.getClass().getName());
+    final Map<String, Object> m2 = new TreeMap<String, Object>();
+    m2.put(RemoteException.class.getSimpleName(), m);
+    final String js = JSON.toString(m2);
+
+    return Response.status(s).type(MediaType.APPLICATION_JSON).entity(js)
+        .build();
+  }
+}
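
So, for example, the NotFoundException thrown by HsWebServices for an
unknown job comes back as a 404 whose body looks roughly like this (the
job id is a placeholder; the message carries the wrapped cause's
toString(), and the keys come out sorted because of the TreeMap):

    {"RemoteException": {
       "exception": "NotFoundException",
       "javaClassName": "org.apache.hadoop.yarn.webapp.NotFoundException",
       "message": "java.lang.Exception: job, job_1323816570000_0001, is not found"
    }}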

Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/NotFoundException.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/NotFoundException.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/NotFoundException.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/NotFoundException.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,44 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.webapp;
+
+import javax.ws.rs.WebApplicationException;
+import javax.ws.rs.core.Response.Status;
+
+/*
+ * Our own NotFoundException, because com.sun.jersey.api.NotFoundException
+ * sets the Response itself and therefore would never reach the
+ * GenericExceptionHandler to fill in the correct response.
+ */
+public class NotFoundException extends WebApplicationException {
+
+  private static final long serialVersionUID = 1L;
+
+  public NotFoundException() {
+    super(Status.NOT_FOUND);
+  }
+
+  public NotFoundException(java.lang.Throwable cause) {
+    super(cause, Status.NOT_FOUND);
+  }
+
+  public NotFoundException(String msg) {
+    super(new Exception(msg), Status.NOT_FOUND);
+  }
+}

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApp.java Tue Dec 13 23:05:56 2011
@@ -18,24 +18,29 @@
 
 package org.apache.hadoop.yarn.webapp;
 
-import com.google.common.base.CharMatcher;
-import static com.google.common.base.Preconditions.*;
-import com.google.common.base.Splitter;
-import com.google.common.collect.Lists;
-import com.google.inject.Provides;
-import com.google.inject.servlet.GuiceFilter;
-import com.google.inject.servlet.ServletModule;
+import static com.google.common.base.Preconditions.checkNotNull;
 
 import java.util.ArrayList;
+import java.util.HashMap;
 import java.util.List;
+import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.http.HttpServer;
-import org.apache.hadoop.yarn.util.StringHelper;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.CharMatcher;
+import com.google.common.base.Splitter;
+import com.google.common.collect.Lists;
+import com.google.inject.Provides;
+import com.google.inject.servlet.GuiceFilter;
+import com.google.inject.servlet.ServletModule;
+import com.sun.jersey.api.core.ResourceConfig;
+import com.sun.jersey.core.util.FeaturesAndProperties;
+import com.sun.jersey.guice.spi.container.servlet.GuiceContainer;
+import com.sun.jersey.spi.container.servlet.ServletContainer;
+
 /**
  * @see WebApps for a usage example
  */
@@ -45,9 +50,10 @@ public abstract class WebApp extends Ser
   public enum HTTP { GET, POST, HEAD, PUT, DELETE };
 
   private volatile String name;
-  private volatile List<String> servePathSpecs = new ArrayList<String>(); 
+  private volatile List<String> servePathSpecs = new ArrayList<String>();
   // path to redirect to if user goes to "/"
   private volatile String redirectPath;
+  private volatile String wsName;
   private volatile Configuration conf;
   private volatile HttpServer httpServer;
   private volatile GuiceFilter guiceFilter;
@@ -104,18 +110,20 @@ public abstract class WebApp extends Ser
 
   void addServePathSpec(String path) { this.servePathSpecs.add(path); }
 
-  public String[] getServePathSpecs() { 
+  public String[] getServePathSpecs() {
     return this.servePathSpecs.toArray(new String[this.servePathSpecs.size()]);
   }
 
   /**
-   * Set a path to redirect the user to if they just go to "/". For 
-   * instance "/" goes to "/yarn/apps". This allows the filters to 
+   * Set a path to redirect the user to if they just go to "/". For
+   * instance "/" goes to "/yarn/apps". This allows the filters to
    * more easily differentiate the different webapps.
    * @param path  the path to redirect to
    */
   void setRedirectPath(String path) { this.redirectPath = path; }
 
+  void setWebServices(String name) { this.wsName = name; }
+
   public String getRedirectPath() { return this.redirectPath; }
 
   void setHostClass(Class<?> cls) {
@@ -129,10 +137,32 @@ public abstract class WebApp extends Ser
   @Override
   public void configureServlets() {
     setup();
+
     serve("/", "/__stop").with(Dispatcher.class);
+
     for (String path : this.servePathSpecs) {
       serve(path).with(Dispatcher.class);
     }
+
+    // Add in the web services filters/servlets if the app has them.
+    // Using the Jersey/Guice integration module. If users have web services
+    // they must also have bound a default one in their webapp code.
+    if (this.wsName != null) {
+      // There seems to be an issue with the Guice/Jersey integration
+      // where we have to list everything we do not want served
+      // through the GuiceContainer - here, everything except
+      // the web services api prefix. We can't just change the filter
+      // from /* below; that doesn't work.
+      String regex = "(?!/" + this.wsName + ")";
+      serveRegex(regex).with(DefaultWrapperServlet.class);
+
+      Map<String, String> params = new HashMap<String, String>();
+      params.put(ResourceConfig.FEATURE_IMPLICIT_VIEWABLES, "true");
+      params.put(ServletContainer.FEATURE_FILTER_FORWARD_ON_404, "true");
+      params.put(FeaturesAndProperties.FEATURE_XMLROOTELEMENT_PROCESSING, "true");
+      filter("/*").through(GuiceContainer.class, params);
+    }
+
   }
 
   /**

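The negative-lookahead trick used with serveRegex above can be sanity-checked in isolation. A small sketch, assuming the servlet container tests the pattern against the start of the request path (roughly Matcher.lookingAt() semantics); the sample paths are placeholders:

    // Illustrative only: "(?!/ws)" matches at the start of every path that is
    // NOT under the web services prefix, so those paths fall through to the
    // DefaultWrapperServlet instead of the GuiceContainer.
    import java.util.regex.Matcher;
    import java.util.regex.Pattern;

    public class WsPrefixRegexDemo {
      public static void main(String[] args) {
        String wsName = "ws";   // e.g. requests like /ws/v1/node/...
        Pattern p = Pattern.compile("(?!/" + wsName + ")");
        for (String path : new String[] {"/node", "/node/allApplications",
            "/ws/v1/node", "/ws/v1/node/apps"}) {
          Matcher m = p.matcher(path);
          System.out.println(path + " -> served by DefaultWrapperServlet: "
              + m.lookingAt());
        }
      }
    }
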
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-common/src/main/java/org/apache/hadoop/yarn/webapp/WebApps.java Tue Dec 13 23:05:56 2011
@@ -72,6 +72,7 @@ public class WebApps {
     }
     
     final String name;
+    final String wsName;
     final Class<T> api;
     final T application;
     String bindAddress = "0.0.0.0";
@@ -82,10 +83,15 @@ public class WebApps {
     private final HashSet<ServletStruct> servlets = new HashSet<ServletStruct>();
     private final HashMap<String, Object> attributes = new HashMap<String, Object>();
 
-    Builder(String name, Class<T> api, T application) {
+    Builder(String name, Class<T> api, T application, String wsName) {
       this.name = name;
       this.api = api;
       this.application = application;
+      this.wsName = wsName;
+    }
+
+    Builder(String name, Class<T> api, T application) {
+      this(name, api, application, null);
     }
 
     public Builder<T> at(String bindAddress) {
@@ -142,6 +148,7 @@ public class WebApps {
         };
       }
       webapp.setName(name);
+      webapp.setWebServices(wsName);
       String basePath = "/" + name;
       webapp.setRedirectPath(basePath);
       if (basePath.equals("/")) { 
@@ -150,6 +157,14 @@ public class WebApps {
         webapp.addServePathSpec(basePath);
         webapp.addServePathSpec(basePath + "/*");
       }
+      if (wsName != null && !wsName.equals(basePath)) {
+        if (wsName.equals("/")) { 
+          webapp.addServePathSpec("/*");
+        } else {
+          webapp.addServePathSpec("/" + wsName);
+          webapp.addServePathSpec("/" + wsName + "/*");
+        }
+      }
       if (conf == null) {
         conf = new Configuration();
       }
@@ -238,6 +253,20 @@ public class WebApps {
    * @param prefix of the webapp
    * @param api the api class for the application
    * @param app the application instance
+   * @param wsPrefix the prefix for the web services api for this app
+   * @return a webapp builder
+   */
+  public static <T> Builder<T> $for(String prefix, Class<T> api, T app, String wsPrefix) {
+    return new Builder<T>(prefix, api, app, wsPrefix);
+  }
+
+  /**
+   * Create a new webapp builder.
+   * @see WebApps for a complete example
+   * @param <T> application (holding the embedded webapp) type
+   * @param prefix of the webapp
+   * @param api the api class for the application
+   * @param app the application instance
    * @return a webapp builder
    */
   public static <T> Builder<T> $for(String prefix, Class<T> api, T app) {

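For reference, a sketch of how a daemon opts in to the new four-argument builder. The "node" and "ws" prefixes mirror the NodeManager wiring elsewhere in this commit; the bind address and webapp instance are placeholders:

    // Illustrative only: starting a webapp with web services enabled. The
    // REST api ends up under /ws/* while the html pages stay under /node.
    import org.apache.hadoop.yarn.server.nodemanager.Context;
    import org.apache.hadoop.yarn.webapp.WebApp;
    import org.apache.hadoop.yarn.webapp.WebApps;

    public class WsBuilderSketch {
      WebApp startWithWebServices(Context nmContext, WebApp nmWebApp) {
        return WebApps.$for("node", Context.class, nmContext, "ws")
            .at("0.0.0.0:8042")   // hypothetical bind address
            .start(nmWebApp);
      }
    }
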
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllApplicationsPage.java Tue Dec 13 23:05:56 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -88,13 +88,11 @@ public class AllApplicationsPage extends
                .tbody();
       for (Entry<ApplicationId, Application> entry : this.nmContext
           .getApplications().entrySet()) {
-        ApplicationId appId = entry.getKey();
-        Application app = entry.getValue();
-        String appIdStr = ConverterUtils.toString(appId);
+        AppInfo info = new AppInfo(entry.getValue());
         tableBody
           .tr()
-            .td().a(url("application", appIdStr), appIdStr)._()
-            .td()._(app.getApplicationState())
+            .td().a(url("application", info.getId()), info.getId())._()
+            .td()._(info.getState())
             ._()
           ._();
       }

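The pages above now render through JAXB-annotated DAO beans instead of reading the records directly. A rough sketch of the shape such a bean takes, inferred from the accessors used here (getId/getState/getUser/getContainers); the real AppInfo in this commit may differ in detail:

    // Illustrative only: a JAXB-annotated DAO bean usable from both the html
    // pages and the JSON/XML web services.
    import java.util.ArrayList;
    import javax.xml.bind.annotation.XmlAccessType;
    import javax.xml.bind.annotation.XmlAccessorType;
    import javax.xml.bind.annotation.XmlRootElement;

    @XmlRootElement(name = "app")
    @XmlAccessorType(XmlAccessType.FIELD)
    public class AppInfoSketch {
      protected String id;
      protected String state;
      protected String user;
      protected ArrayList<String> containerids = new ArrayList<String>();

      public AppInfoSketch() { }   // JAXB needs a no-arg constructor

      public String getId() { return id; }
      public String getState() { return state; }
      public String getUser() { return user; }
      public ArrayList<String> getContainers() { return containerids; }
    }
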
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/AllContainersPage.java Tue Dec 13 23:05:56 2011
@@ -28,9 +28,9 @@ import java.util.Map.Entry;
 import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
-import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.BODY;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
@@ -83,17 +83,14 @@ public class AllContainersPage extends N
           ._().tbody();
       for (Entry<ContainerId, Container> entry : this.nmContext
           .getContainers().entrySet()) {
-        ContainerId containerId = entry.getKey();
-        Container container = entry.getValue();
-        String containerIdStr = ConverterUtils.toString(containerId);
+        ContainerInfo info = new ContainerInfo(this.nmContext, entry.getValue());
         tableBody
           .tr()
-            .td().a(url("container", containerIdStr), containerIdStr)
+            .td().a(url("container", info.getId()), info.getId())
             ._()
-            .td()._(container.getContainerState())._()
+            .td()._(info.getState())._()
             .td()
-                .a(url("containerlogs", containerIdStr, container.getUser()),
-                    "logs")._()
+                .a(url(info.getShortLogLink()), "logs")._()
           ._();
       }
       tableBody._()._()._();

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ApplicationPage.java Tue Dec 13 23:05:56 2011
@@ -23,19 +23,16 @@ import static org.apache.hadoop.yarn.web
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.tableInit;
 
-import java.util.Map;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.apache.hadoop.yarn.api.records.ContainerId;
 import org.apache.hadoop.yarn.factories.RecordFactory;
 import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
-import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
 import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -81,15 +78,14 @@ public class ApplicationPage extends NMV
           ConverterUtils.toApplicationId(this.recordFactory,
               $(APPLICATION_ID));
       Application app = this.nmContext.getApplications().get(applicationID);
-      Map<ContainerId, Container> containers = app.getContainers();
+      AppInfo info = new AppInfo(app);
       info("Application's information")
-            ._("ApplicationId", ConverterUtils.toString(app.getAppId()))
-            ._("ApplicationState", app.getApplicationState().toString())
-            ._("User", app.getUser());
+            ._("ApplicationId", info.getId())
+            ._("ApplicationState", info.getState())
+            ._("User", info.getUser());
       TABLE<Hamlet> containersListBody = html._(InfoBlock.class)
           .table("#containers");
-      for (ContainerId containerId : containers.keySet()) {
-        String containerIdStr = ConverterUtils.toString(containerId);
+      for (String containerIdStr : info.getContainers()) {
         containersListBody
                .tr().td()
                  .a(url("container", containerIdStr), containerIdStr)

Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/ContainerPage.java Tue Dec 13 23:05:56 2011
@@ -18,18 +18,16 @@
 
 package org.apache.hadoop.yarn.server.nodemanager.webapp;
 
-import static org.apache.hadoop.yarn.util.StringHelper.ujoin;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
 import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
 
 import org.apache.hadoop.yarn.api.records.ContainerId;
-import org.apache.hadoop.yarn.api.records.ContainerStatus;
-import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
 import org.apache.hadoop.yarn.util.ConverterUtils;
-import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.SubView;
+import org.apache.hadoop.yarn.webapp.YarnWebParams;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.DIV;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -77,21 +75,16 @@ public class ContainerPage extends NMVie
                 + "please go back to the previous page and retry.")._();
         return;
       }
-      ContainerStatus containerData = container.cloneAndGetContainerStatus();
-      int exitCode = containerData.getExitStatus();
-      String exiStatus = 
-          (exitCode == YarnConfiguration.INVALID_CONTAINER_EXIT_STATUS) ? 
-              "N/A" : String.valueOf(exitCode);
+      ContainerInfo info = new ContainerInfo(this.nmContext, container);
+
       info("Container information")
-        ._("ContainerID", $(CONTAINER_ID))
-        ._("ContainerState", container.getContainerState())
-        ._("ExitStatus", exiStatus)
-        ._("Diagnostics", containerData.getDiagnostics())
-        ._("User", container.getUser())
-        ._("TotalMemoryNeeded",
-            container.getLaunchContext().getResource().getMemory())
-        ._("logs", ujoin("containerlogs", $(CONTAINER_ID), container.getUser()),
-            "Link to logs");
+        ._("ContainerID", info.getId())
+        ._("ContainerState", info.getState())
+        ._("ExitStatus", info.getExitStatus())
+        ._("Diagnostics", info.getDiagnostics())
+        ._("User", info.getUser())
+        ._("TotalMemoryNeeded", info.getMemoryNeeded())
+        ._("logs", info.getShortLogLink(), "Link to logs");
       html._(InfoBlock.class);
     }
   }

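The exit-status formatting that previously lived inline in this page has moved into the ContainerInfo DAO. A sketch reproducing the removed logic, under the assumption that the DAO keeps the same N/A convention:

    // Illustrative only: the exit-status mapping deleted from the page above,
    // reconstructed from the removed lines.
    import org.apache.hadoop.yarn.api.records.ContainerStatus;
    import org.apache.hadoop.yarn.conf.YarnConfiguration;
    import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;

    public class ExitStatusSketch {
      static String exitStatusOf(Container container) {
        ContainerStatus status = container.cloneAndGetContainerStatus();
        int exitCode = status.getExitStatus();
        // A container that has not exited yet reports the invalid sentinel.
        return (exitCode == YarnConfiguration.INVALID_CONTAINER_EXIT_STATUS)
            ? "N/A" : String.valueOf(exitCode);
      }
    }
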
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/JAXBContextResolver.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/JAXBContextResolver.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/JAXBContextResolver.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/JAXBContextResolver.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,62 @@
+/**
+* Licensed to the Apache Software Foundation (ASF) under one
+* or more contributor license agreements.  See the NOTICE file
+* distributed with this work for additional information
+* regarding copyright ownership.  The ASF licenses this file
+* to you under the Apache License, Version 2.0 (the
+* "License"); you may not use this file except in compliance
+* with the License.  You may obtain a copy of the License at
+*
+*     http://www.apache.org/licenses/LICENSE-2.0
+*
+* Unless required by applicable law or agreed to in writing, software
+* distributed under the License is distributed on an "AS IS" BASIS,
+* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+* See the License for the specific language governing permissions and
+* limitations under the License.
+*/
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp;
+
+import java.util.Set;
+import java.util.HashSet;
+import java.util.Arrays;
+
+import com.sun.jersey.api.json.JSONConfiguration;
+import com.sun.jersey.api.json.JSONJAXBContext;
+import com.google.inject.Singleton;
+
+import javax.ws.rs.ext.ContextResolver;
+import javax.ws.rs.ext.Provider;
+import javax.xml.bind.JAXBContext;
+
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppsInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainersInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NodeInfo;
+
+@Singleton
+@Provider
+public class JAXBContextResolver implements ContextResolver<JAXBContext> {
+
+  private JAXBContext context;
+  private final Set<Class> types;
+    
+  // All of the dao classes must be registered here.
+  private final Class[] cTypes = {AppInfo.class, AppsInfo.class, 
+      ContainerInfo.class, ContainersInfo.class, NodeInfo.class};
+    
+  public JAXBContextResolver() throws Exception {
+    this.types = new HashSet<Class>(Arrays.asList(cTypes));
+    // Set the JSON configuration so that the JSON output looks like
+    // the XML output.
+    this.context = new JSONJAXBContext(JSONConfiguration.natural().
+        rootUnwrapping(false).build(), cTypes);
+  }
+    
+  @Override
+  public JAXBContext getContext(Class<?> objectType) {
+    return (types.contains(objectType)) ? context : null;
+  }
+}

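What the natural notation with rootUnwrapping(false) buys can be seen in a standalone marshalling sketch; the bean and its values below are placeholders:

    // Illustrative only: keeping the root element name makes the JSON mirror
    // the XML output, e.g. {"app":{"id":"...","state":"RUNNING"}}.
    import java.io.StringWriter;
    import javax.xml.bind.annotation.XmlRootElement;
    import com.sun.jersey.api.json.JSONConfiguration;
    import com.sun.jersey.api.json.JSONJAXBContext;
    import com.sun.jersey.api.json.JSONMarshaller;

    public class NaturalJsonDemo {
      @XmlRootElement(name = "app")
      static class App {
        public String id = "application_1234567890123_0001";   // placeholder
        public String state = "RUNNING";
      }

      public static void main(String[] args) throws Exception {
        JSONJAXBContext ctx = new JSONJAXBContext(
            JSONConfiguration.natural().rootUnwrapping(false).build(),
            App.class);
        JSONMarshaller m = ctx.createJSONMarshaller();
        StringWriter w = new StringWriter();
        m.marshallToJSON(new App(), w);
        System.out.println(w);   // {"app":{"id":"...","state":"RUNNING"}}
      }
    }
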
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NMWebServices.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,163 @@
+/** Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.yarn.server.nodemanager.webapp;
+
+import java.util.Map.Entry;
+
+import javax.ws.rs.GET;
+import javax.ws.rs.Path;
+import javax.ws.rs.PathParam;
+import javax.ws.rs.Produces;
+import javax.ws.rs.QueryParam;
+import javax.ws.rs.core.MediaType;
+import javax.ws.rs.core.UriInfo;
+
+import org.apache.hadoop.yarn.api.records.ApplicationId;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.factories.RecordFactory;
+import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
+import org.apache.hadoop.yarn.server.nodemanager.Context;
+import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.Application;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.application.ApplicationState;
+import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.AppsInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainerInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.ContainersInfo;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NodeInfo;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.webapp.NotFoundException;
+import org.apache.hadoop.yarn.webapp.WebApp;
+
+import com.google.inject.Inject;
+import com.google.inject.Singleton;
+
+@Singleton
+@Path("/ws/v1/node")
+public class NMWebServices {
+  private Context nmContext;
+  private ResourceView rview;
+  private WebApp webapp;
+  private static RecordFactory recordFactory = RecordFactoryProvider
+      .getRecordFactory(null);
+
+  @javax.ws.rs.core.Context
+  UriInfo uriInfo;
+
+  @Inject
+  public NMWebServices(final Context nm, final ResourceView view,
+      final WebApp webapp) {
+    this.nmContext = nm;
+    this.rview = view;
+    this.webapp = webapp;
+  }
+
+  @GET
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public NodeInfo get() {
+    return getNodeInfo();
+  }
+
+  @GET
+  @Path("/info")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public NodeInfo getNodeInfo() {
+    return new NodeInfo(this.nmContext, this.rview);
+  }
+
+  @GET
+  @Path("/apps")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppsInfo getNodeApps(@QueryParam("state") String stateQuery,
+      @QueryParam("user") String userQuery) {
+    AppsInfo allApps = new AppsInfo();
+    for (Entry<ApplicationId, Application> entry : this.nmContext
+        .getApplications().entrySet()) {
+
+      AppInfo appInfo = new AppInfo(entry.getValue());
+      if (stateQuery != null && !stateQuery.isEmpty()) {
+        ApplicationState.valueOf(stateQuery); // validate the state, else throw
+        if (!appInfo.getState().equalsIgnoreCase(stateQuery)) {
+          continue;
+        }
+      }
+      if (userQuery != null && !userQuery.isEmpty()) {
+        if (!appInfo.getUser().equals(userQuery)) {
+          continue;
+        }
+      }
+      allApps.add(appInfo);
+    }
+    return allApps;
+  }
+
+  @GET
+  @Path("/apps/{appid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public AppInfo getNodeApp(@PathParam("appid") String appId) {
+    ApplicationId id = ConverterUtils.toApplicationId(recordFactory, appId);
+    if (id == null) {
+      throw new NotFoundException("app with id " + appId + " not found");
+    }
+    Application app = this.nmContext.getApplications().get(id);
+    if (app == null) {
+      throw new NotFoundException("app with id " + appId + " not found");
+    }
+    return new AppInfo(app);
+
+  }
+
+  @GET
+  @Path("/containers")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ContainersInfo getNodeContainers() {
+    ContainersInfo allContainers = new ContainersInfo();
+    for (Entry<ContainerId, Container> entry : this.nmContext.getContainers()
+        .entrySet()) {
+      if (entry.getValue() == null) {
+        // just skip it
+        continue;
+      }
+      ContainerInfo info = new ContainerInfo(this.nmContext, entry.getValue(),
+          uriInfo.getBaseUri().toString(), webapp.name());
+      allContainers.add(info);
+    }
+    return allContainers;
+  }
+
+  @GET
+  @Path("/containers/{containerid}")
+  @Produces({ MediaType.APPLICATION_JSON, MediaType.APPLICATION_XML })
+  public ContainerInfo getNodeContainer(@PathParam("containerid") String id) {
+    // may be null if the id string could not be parsed
+    ContainerId containerId = ConverterUtils.toContainerId(id);
+    if (containerId == null) {
+      throw new NotFoundException("container with id, " + id
+          + ", is empty or null");
+    }
+    Container container = nmContext.getContainers().get(containerId);
+    if (container == null) {
+      throw new NotFoundException("container with id, " + id + ", not found");
+    }
+    return new ContainerInfo(this.nmContext, container, uriInfo.getBaseUri()
+        .toString(), webapp.name());
+
+  }
+
+}

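A quick way to exercise the new endpoints is the Jersey 1.x client; the host and port below are placeholders, and the paths come from the @Path annotations above:

    // Illustrative only: fetching the new NodeManager REST resources as JSON
    // and XML.
    import javax.ws.rs.core.MediaType;
    import com.sun.jersey.api.client.Client;
    import com.sun.jersey.api.client.WebResource;

    public class NMWebServicesClientSketch {
      public static void main(String[] args) {
        Client client = Client.create();
        WebResource nm = client.resource("http://localhost:8042"); // hypothetical
        String nodeJson = nm.path("ws").path("v1").path("node")
            .accept(MediaType.APPLICATION_JSON).get(String.class);
        String appsXml = nm.path("ws").path("v1").path("node").path("apps")
            .queryParam("state", "RUNNING")
            .accept(MediaType.APPLICATION_XML).get(String.class);
        System.out.println(nodeJson);
        System.out.println(appsXml);
      }
    }
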
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/webapp/NodePage.java Tue Dec 13 23:05:56 2011
@@ -23,10 +23,10 @@ import static org.apache.hadoop.yarn.web
 
 import java.util.Date;
 
-import org.apache.hadoop.util.VersionInfo;
-import org.apache.hadoop.yarn.util.YarnVersionInfo;
+import org.apache.hadoop.util.StringUtils;
 import org.apache.hadoop.yarn.server.nodemanager.Context;
 import org.apache.hadoop.yarn.server.nodemanager.ResourceView;
+import org.apache.hadoop.yarn.server.nodemanager.webapp.dao.NodeInfo;
 import org.apache.hadoop.yarn.webapp.SubView;
 import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.HTML;
 import org.apache.hadoop.yarn.webapp.view.HtmlBlock;
@@ -36,6 +36,8 @@ import com.google.inject.Inject;
 
 public class NodePage extends NMView {
 
+  private static final long BYTES_IN_MB = 1024 * 1024;
+
   @Override
   protected void commonPreHead(HTML<_> html) {
     super.commonPreHead(html);
@@ -60,21 +62,22 @@ public class NodePage extends NMView {
 
     @Override
     protected void render(Block html) {
+      NodeInfo info = new NodeInfo(this.context, this.resourceView);
       info("NodeManager information")
           ._("Total Vmem allocated for Containers",
-              this.resourceView.getVmemAllocatedForContainers() + "bytes")
+              StringUtils.byteDesc(info.getTotalVmemAllocated() * BYTES_IN_MB))
           ._("Total Pmem allocated for Container",
-              this.resourceView.getPmemAllocatedForContainers() + "bytes")
+              StringUtils.byteDesc(info.getTotalPmemAllocated() * BYTES_IN_MB))
           ._("NodeHealthyStatus",
-              this.context.getNodeHealthStatus().getIsNodeHealthy())
+              info.getHealthStatus())
           ._("LastNodeHealthTime", new Date(
-                this.context.getNodeHealthStatus().getLastHealthReportTime()))
+              info.getLastNodeUpdateTime()))
           ._("NodeHealthReport",
-              this.context.getNodeHealthStatus().getHealthReport())
-          ._("Node Manager Version:", YarnVersionInfo.getBuildVersion() +
-              " on " + YarnVersionInfo.getDate())
-          ._("Hadoop Version:", VersionInfo.getBuildVersion() +
-              " on " + VersionInfo.getDate());
+              info.getHealthReport())
+          ._("Node Manager Version:", info.getNMBuildVersion() +
+              " on " + info.getNMVersionBuiltOn())
+          ._("Hadoop Version:", info.getHadoopBuildVersion() +
+              " on " + info.getHadoopVersionBuiltOn());
       html._(InfoBlock.class);
     }
   }