You are viewing a plain text version of this content. The canonical link for it is here.
Posted to mapreduce-commits@hadoop.apache.org by vi...@apache.org on 2011/12/14 00:05:59 UTC
svn commit: r1213975 [2/6] - in
/hadoop/common/trunk/hadoop-mapreduce-project: ./ hadoop-mapreduce-client/
hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/client/
hadoop-mapreduce-client/hadoop-mapre...
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,349 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import static org.apache.hadoop.yarn.util.StringHelper.percent;
+
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.JobACL;
+import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptId;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskId;
+import org.apache.hadoop.mapreduce.v2.app.job.Job;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
+import org.apache.hadoop.security.authorize.AccessControlList;
+import org.apache.hadoop.yarn.util.Times;
+
+@XmlRootElement(name = "job")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobInfo {
+
+ // ok for any user to see
+ protected long startTime;
+ protected long finishTime;
+ protected long elapsedTime;
+ protected String id;
+ protected String name;
+ protected String user;
+ protected String state;
+ protected int mapsTotal;
+ protected int mapsCompleted;
+ protected float mapProgress;
+ protected int reducesTotal;
+ protected int reducesCompleted;
+ protected float reduceProgress;
+
+ @XmlTransient
+ protected String mapProgressPercent;
+ @XmlTransient
+ protected String reduceProgressPercent;
+
+ // these should only be seen if acls allow
+ protected int mapsPending;
+ protected int mapsRunning;
+ protected int reducesPending;
+ protected int reducesRunning;
+ protected boolean uberized;
+ protected String diagnostics;
+ protected int newReduceAttempts = 0;
+ protected int runningReduceAttempts = 0;
+ protected int failedReduceAttempts = 0;
+ protected int killedReduceAttempts = 0;
+ protected int successfulReduceAttempts = 0;
+ protected int newMapAttempts = 0;
+ protected int runningMapAttempts = 0;
+ protected int failedMapAttempts = 0;
+ protected int killedMapAttempts = 0;
+ protected int successfulMapAttempts = 0;
+ protected ArrayList<ConfEntryInfo> acls;
+
+ @XmlTransient
+ protected int numMaps;
+ @XmlTransient
+ protected int numReduces;
+
+ public JobInfo() {
+ }
+
+ public JobInfo(Job job, Boolean hasAccess) {
+ this.id = MRApps.toString(job.getID());
+ JobReport report = job.getReport();
+ countTasksAndAttempts(job);
+ this.startTime = report.getStartTime();
+ this.finishTime = report.getFinishTime();
+ this.elapsedTime = Times.elapsed(this.startTime, this.finishTime);
+ if (this.elapsedTime == -1) {
+ this.elapsedTime = 0;
+ }
+ this.name = job.getName().toString();
+ this.user = job.getUserName();
+ this.state = job.getState().toString();
+ this.mapsTotal = job.getTotalMaps();
+ this.mapsCompleted = job.getCompletedMaps();
+ this.mapProgress = report.getMapProgress() * 100;
+ this.mapProgressPercent = percent(report.getMapProgress());
+ this.reducesTotal = job.getTotalReduces();
+ this.reducesCompleted = job.getCompletedReduces();
+ this.reduceProgress = report.getReduceProgress() * 100;
+ this.reduceProgressPercent = percent(report.getReduceProgress());
+
+ this.acls = new ArrayList<ConfEntryInfo>();
+ if (hasAccess) {
+ this.uberized = job.isUber();
+
+ List<String> diagnostics = job.getDiagnostics();
+ if (diagnostics != null && !diagnostics.isEmpty()) {
+ StringBuffer b = new StringBuffer();
+ for (String diag : diagnostics) {
+ b.append(diag);
+ }
+ this.diagnostics = b.toString();
+ }
+
+ Map<JobACL, AccessControlList> allacls = job.getJobACLs();
+ if (allacls != null) {
+ for (Map.Entry<JobACL, AccessControlList> entry : allacls.entrySet()) {
+ this.acls.add(new ConfEntryInfo(entry.getKey().getAclName(), entry
+ .getValue().getAclString()));
+ }
+ }
+ }
+ }
+
+ public int getNewReduceAttempts() {
+ return this.newReduceAttempts;
+ }
+
+ public int getKilledReduceAttempts() {
+ return this.killedReduceAttempts;
+ }
+
+ public int getFailedReduceAttempts() {
+ return this.failedReduceAttempts;
+ }
+
+ public int getRunningReduceAttempts() {
+ return this.runningReduceAttempts;
+ }
+
+ public int getSuccessfulReduceAttempts() {
+ return this.successfulReduceAttempts;
+ }
+
+ public int getNewMapAttempts() {
+ return this.newMapAttempts;
+ }
+
+ public int getKilledMapAttempts() {
+ return this.killedMapAttempts;
+ }
+
+ public ArrayList<ConfEntryInfo> getAcls() {
+ return acls;
+ }
+
+ public int getFailedMapAttempts() {
+ return this.failedMapAttempts;
+ }
+
+ public int getRunningMapAttempts() {
+ return this.runningMapAttempts;
+ }
+
+ public int getSuccessfulMapAttempts() {
+ return this.successfulMapAttempts;
+ }
+
+ public int getReducesCompleted() {
+ return this.reducesCompleted;
+ }
+
+ public int getReducesTotal() {
+ return this.reducesTotal;
+ }
+
+ public int getReducesPending() {
+ return this.reducesPending;
+ }
+
+ public int getReducesRunning() {
+ return this.reducesRunning;
+ }
+
+ public int getMapsCompleted() {
+ return this.mapsCompleted;
+ }
+
+ public int getMapsTotal() {
+ return this.mapsTotal;
+ }
+
+ public int getMapsPending() {
+ return this.mapsPending;
+ }
+
+ public int getMapsRunning() {
+ return this.mapsRunning;
+ }
+
+ public String getState() {
+ return this.state;
+ }
+
+ public String getUser() {
+ return this.user;
+ }
+
+ public String getName() {
+ return this.name;
+ }
+
+ public String getId() {
+ return this.id;
+ }
+
+ public long getStartTime() {
+ return this.startTime;
+ }
+
+ public long getElapsedTime() {
+ return this.elapsedTime;
+ }
+
+ public long getFinishTime() {
+ return this.finishTime;
+ }
+
+ public boolean isUberized() {
+ return this.uberized;
+ }
+
+ public String getdiagnostics() {
+ return this.diagnostics;
+ }
+
+ public float getMapProgress() {
+ return this.mapProgress;
+ }
+
+ public String getMapProgressPercent() {
+ return this.mapProgressPercent;
+ }
+
+ public float getReduceProgress() {
+ return this.reduceProgress;
+ }
+
+ public String getReduceProgressPercent() {
+ return this.reduceProgressPercent;
+ }
+
+ /**
+ * Go through a job and update the member variables with counts for
+ * information to output in the page.
+ *
+ * @param job
+ * the job to get counts for.
+ */
+ private void countTasksAndAttempts(Job job) {
+ numReduces = 0;
+ numMaps = 0;
+ final Map<TaskId, Task> tasks = job.getTasks();
+ if (tasks == null) {
+ return;
+ }
+ for (Task task : tasks.values()) {
+ switch (task.getType()) {
+ case MAP:
+ // Task counts
+ switch (task.getState()) {
+ case RUNNING:
+ ++this.mapsRunning;
+ break;
+ case SCHEDULED:
+ ++this.mapsPending;
+ break;
+ }
+ break;
+ case REDUCE:
+ // Task counts
+ switch (task.getState()) {
+ case RUNNING:
+ ++this.reducesRunning;
+ break;
+ case SCHEDULED:
+ ++this.reducesPending;
+ break;
+ }
+ break;
+ }
+ // Attempts counts
+ Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
+ int newAttempts, running, successful, failed, killed;
+ for (TaskAttempt attempt : attempts.values()) {
+
+ newAttempts = 0;
+ running = 0;
+ successful = 0;
+ failed = 0;
+ killed = 0;
+ if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
+ ++newAttempts;
+ } else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt.getState())) {
+ ++running;
+ } else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
+ .getState())) {
+ ++successful;
+ } else if (TaskAttemptStateUI.FAILED.correspondsTo(attempt.getState())) {
+ ++failed;
+ } else if (TaskAttemptStateUI.KILLED.correspondsTo(attempt.getState())) {
+ ++killed;
+ }
+
+ switch (task.getType()) {
+ case MAP:
+ this.newMapAttempts += newAttempts;
+ this.runningMapAttempts += running;
+ this.successfulMapAttempts += successful;
+ this.failedMapAttempts += failed;
+ this.killedMapAttempts += killed;
+ break;
+ case REDUCE:
+ this.newReduceAttempts += newAttempts;
+ this.runningReduceAttempts += running;
+ this.successfulReduceAttempts += successful;
+ this.failedReduceAttempts += failed;
+ this.killedReduceAttempts += killed;
+ break;
+ }
+ }
+ }
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskAttemptCounterInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,63 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+
+@XmlRootElement(name = "JobTaskAttemptCounters")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobTaskAttemptCounterInfo {
+
+ @XmlTransient
+ protected Counters total = null;
+
+ protected String id;
+ protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+
+ public JobTaskAttemptCounterInfo() {
+ }
+
+ public JobTaskAttemptCounterInfo(TaskAttempt taskattempt) {
+
+ long value = 0;
+ this.id = MRApps.toString(taskattempt.getID());
+ total = taskattempt.getCounters();
+ taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+ if (total != null) {
+ for (CounterGroup g : total.getAllCounterGroups().values()) {
+ if (g != null) {
+ TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
+ if (cginfo != null) {
+ taskCounterGroups.add(cginfo);
+ }
+ }
+ }
+ }
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobTaskCounterInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,59 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+import org.apache.hadoop.mapreduce.v2.api.records.Counters;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+
+@XmlRootElement(name = "jobTaskCounters")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobTaskCounterInfo {
+
+ @XmlTransient
+ protected Counters total = null;
+
+ protected String id;
+ protected ArrayList<TaskCounterGroupInfo> taskCounterGroups;
+
+ public JobTaskCounterInfo() {
+ }
+
+ public JobTaskCounterInfo(Task task) {
+ total = task.getCounters();
+ this.id = MRApps.toString(task.getID());
+ taskCounterGroups = new ArrayList<TaskCounterGroupInfo>();
+ if (total != null) {
+ for (CounterGroup g : total.getAllCounterGroups().values()) {
+ if (g != null) {
+ TaskCounterGroupInfo cginfo = new TaskCounterGroupInfo(g.getName(), g);
+ taskCounterGroups.add(cginfo);
+ }
+ }
+ }
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/JobsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "jobs")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class JobsInfo {
+
+ protected ArrayList<JobInfo> job = new ArrayList<JobInfo>();
+
+ public JobsInfo() {
+ } // JAXB needs this
+
+ public void add(JobInfo jobInfo) {
+ job.add(jobInfo);
+ }
+
+ public ArrayList<JobInfo> getJobs() {
+ return job;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/ReduceTaskAttemptInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,83 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.yarn.util.Times;
+
+@XmlRootElement(name = "taskAttempt")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class ReduceTaskAttemptInfo extends TaskAttemptInfo {
+
+ protected long shuffleFinishTime;
+ protected long mergeFinishTime;
+ protected long elapsedShuffleTime;
+ protected long elapsedMergeTime;
+ protected long elapsedReduceTime;
+
+ public ReduceTaskAttemptInfo() {
+ }
+
+ public ReduceTaskAttemptInfo(TaskAttempt ta, TaskType type) {
+ super(ta, type, false);
+
+ this.shuffleFinishTime = ta.getShuffleFinishTime();
+ this.mergeFinishTime = ta.getSortFinishTime();
+ this.elapsedShuffleTime = Times.elapsed(this.startTime,
+ this.shuffleFinishTime, false);
+ if (this.elapsedShuffleTime == -1) {
+ this.elapsedShuffleTime = 0;
+ }
+ this.elapsedMergeTime = Times.elapsed(this.shuffleFinishTime,
+ this.mergeFinishTime, false);
+ if (this.elapsedMergeTime == -1) {
+ this.elapsedMergeTime = 0;
+ }
+ this.elapsedReduceTime = Times.elapsed(this.mergeFinishTime,
+ this.finishTime, false);
+ if (this.elapsedReduceTime == -1) {
+ this.elapsedReduceTime = 0;
+ }
+ }
+
+ public long getShuffleFinishTime() {
+ return this.shuffleFinishTime;
+ }
+
+ public long getMergeFinishTime() {
+ return this.mergeFinishTime;
+ }
+
+ public long getElapsedShuffleTime() {
+ return this.elapsedShuffleTime;
+ }
+
+ public long getElapsedMergeTime() {
+ return this.elapsedMergeTime;
+ }
+
+ public long getElapsedReduceTime() {
+ return this.elapsedReduceTime;
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,133 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.List;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlSeeAlso;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.api.records.ContainerId;
+import org.apache.hadoop.yarn.util.ConverterUtils;
+import org.apache.hadoop.yarn.util.Times;
+
+@XmlRootElement(name = "taskAttempt")
+@XmlSeeAlso({ ReduceTaskAttemptInfo.class })
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TaskAttemptInfo {
+
+ protected long startTime;
+ protected long finishTime;
+ protected long elapsedTime;
+ protected float progress;
+ protected String id;
+ protected String rack;
+ protected String state;
+ protected String nodeHttpAddress;
+ protected String diagnostics;
+ protected String type;
+ protected String assignedContainerId;
+
+ @XmlTransient
+ protected ContainerId assignedContainer;
+
+ public TaskAttemptInfo() {
+ }
+
+ public TaskAttemptInfo(TaskAttempt ta, Boolean isRunning) {
+ this(ta, TaskType.MAP, isRunning);
+ }
+
+ public TaskAttemptInfo(TaskAttempt ta, TaskType type, Boolean isRunning) {
+ this.type = type.toString();
+ this.id = MRApps.toString(ta.getID());
+ this.nodeHttpAddress = ta.getNodeHttpAddress();
+ this.startTime = ta.getLaunchTime();
+ this.finishTime = ta.getFinishTime();
+ this.assignedContainerId = ConverterUtils.toString(ta
+ .getAssignedContainerID());
+ this.assignedContainer = ta.getAssignedContainerID();
+ this.progress = ta.getProgress() * 100;
+ this.state = ta.getState().toString();
+ this.elapsedTime = Times
+ .elapsed(this.startTime, this.finishTime, isRunning);
+ if (this.elapsedTime == -1) {
+ this.elapsedTime = 0;
+ }
+ List<String> diagnostics = ta.getDiagnostics();
+ if (diagnostics != null && !diagnostics.isEmpty()) {
+ StringBuffer b = new StringBuffer();
+ for (String diag : diagnostics) {
+ b.append(diag);
+ }
+ this.diagnostics = b.toString();
+ }
+ this.rack = ta.getNodeRackName();
+ }
+
+ public String getAssignedContainerIdStr() {
+ return this.assignedContainerId;
+ }
+
+ public ContainerId getAssignedContainerId() {
+ return this.assignedContainer;
+ }
+
+ public String getState() {
+ return this.state;
+ }
+
+ public String getId() {
+ return this.id;
+ }
+
+ public long getStartTime() {
+ return this.startTime;
+ }
+
+ public long getFinishTime() {
+ return this.finishTime;
+ }
+
+ public float getProgress() {
+ return this.progress;
+ }
+
+ public long getElapsedTime() {
+ return this.elapsedTime;
+ }
+
+ public String getNode() {
+ return this.nodeHttpAddress;
+ }
+
+ public String getRack() {
+ return this.rack;
+ }
+
+ public String getNote() {
+ return this.diagnostics;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskAttemptsInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "taskattempts")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TaskAttemptsInfo {
+
+ protected ArrayList<TaskAttemptInfo> taskattempt = new ArrayList<TaskAttemptInfo>();
+
+ public TaskAttemptsInfo() {
+ } // JAXB needs this
+
+ public void add(TaskAttemptInfo taskattemptInfo) {
+ taskattempt.add(taskattemptInfo);
+ }
+
+ public ArrayList<TaskAttemptInfo> getTaskAttempts() {
+ return taskattempt;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterGroupInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,49 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+import org.apache.hadoop.mapreduce.v2.api.records.Counter;
+import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
+
+@XmlRootElement
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TaskCounterGroupInfo {
+
+ protected String counterGroupName;
+ protected ArrayList<TaskCounterInfo> counter;
+
+ public TaskCounterGroupInfo() {
+ }
+
+ public TaskCounterGroupInfo(String name, CounterGroup g) {
+ this.counterGroupName = name;
+ this.counter = new ArrayList<TaskCounterInfo>();
+
+ for (Counter c : g.getAllCounters().values()) {
+ TaskCounterInfo cinfo = new TaskCounterInfo(c.getName(), c.getValue());
+ this.counter.add(cinfo);
+ }
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskCounterInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "counter")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TaskCounterInfo {
+
+ protected String name;
+ protected long value;
+
+ public TaskCounterInfo() {
+ }
+
+ public TaskCounterInfo(String name, long value) {
+ this.name = name;
+ this.value = value;
+ }
+
+ public String getName() {
+ return name;
+ }
+
+ public long getValue() {
+ return value;
+ }
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TaskInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,122 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+import javax.xml.bind.annotation.XmlTransient;
+
+import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
+import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
+import org.apache.hadoop.mapreduce.v2.app.job.Task;
+import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.yarn.util.Times;
+
+@XmlRootElement(name = "task")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TaskInfo {
+
+ protected long startTime;
+ protected long finishTime;
+ protected long elapsedTime;
+ protected float progress;
+ protected String id;
+ protected String state;
+ protected String type;
+ protected String successfulAttempt;
+
+ @XmlTransient
+ int taskNum;
+
+ @XmlTransient
+ TaskAttempt successful;
+
+ public TaskInfo() {
+ }
+
+ public TaskInfo(Task task) {
+ TaskType ttype = task.getType();
+ this.type = ttype.toString();
+ TaskReport report = task.getReport();
+ this.startTime = report.getStartTime();
+ this.finishTime = report.getFinishTime();
+ this.elapsedTime = Times.elapsed(this.startTime, this.finishTime, false);
+ if (this.elapsedTime == -1) {
+ this.elapsedTime = 0;
+ }
+ this.state = report.getTaskState().toString();
+ this.progress = report.getProgress() * 100;
+ this.id = MRApps.toString(task.getID());
+ this.taskNum = task.getID().getId();
+ this.successful = getSuccessfulAttempt(task);
+ if (successful != null) {
+ this.successfulAttempt = MRApps.toString(successful.getID());
+ } else {
+ this.successfulAttempt = "";
+ }
+ }
+
+ public float getProgress() {
+ return this.progress;
+ }
+
+ public String getState() {
+ return this.state;
+ }
+
+ public String getId() {
+ return this.id;
+ }
+
+ public int getTaskNum() {
+ return this.taskNum;
+ }
+
+ public long getStartTime() {
+ return this.startTime;
+ }
+
+ public long getFinishTime() {
+ return this.finishTime;
+ }
+
+ public long getElapsedTime() {
+ return this.elapsedTime;
+ }
+
+ public String getSuccessfulAttempt() {
+ return this.successfulAttempt;
+ }
+
+ public TaskAttempt getSuccessful() {
+ return this.successful;
+ }
+
+ private TaskAttempt getSuccessfulAttempt(Task task) {
+ for (TaskAttempt attempt : task.getAttempts().values()) {
+ if (attempt.getState() == TaskAttemptState.SUCCEEDED) {
+ return attempt;
+ }
+ }
+ return null;
+ }
+
+}
Added: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TasksInfo.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TasksInfo.java?rev=1213975&view=auto
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TasksInfo.java (added)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-app/src/main/java/org/apache/hadoop/mapreduce/v2/app/webapp/dao/TasksInfo.java Tue Dec 13 23:05:56 2011
@@ -0,0 +1,43 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.mapreduce.v2.app.webapp.dao;
+
+import java.util.ArrayList;
+
+import javax.xml.bind.annotation.XmlAccessType;
+import javax.xml.bind.annotation.XmlAccessorType;
+import javax.xml.bind.annotation.XmlRootElement;
+
+@XmlRootElement(name = "tasks")
+@XmlAccessorType(XmlAccessType.FIELD)
+public class TasksInfo {
+
+ protected ArrayList<TaskInfo> task = new ArrayList<TaskInfo>();
+
+ public TasksInfo() {
+ } // JAXB needs this
+
+ public void add(TaskInfo taskInfo) {
+ task.add(taskInfo);
+ }
+
+ public ArrayList<TaskInfo> getTasks() {
+ return task;
+ }
+
+}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/jobhistory/JobHistoryParser.java Tue Dec 13 23:05:56 2011
@@ -353,7 +353,7 @@ public class JobHistoryParser {
* The class where job information is aggregated into after parsing
*/
public static class JobInfo {
- String errorInfo = "None";
+ String errorInfo = "";
long submitTime;
long finishTime;
JobID jobid;
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/HistoryClientService.java Tue Dec 13 23:05:56 2011
@@ -27,12 +27,11 @@ import java.security.PrivilegedException
import java.util.Arrays;
import java.util.Collection;
-import org.apache.hadoop.ipc.Server;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
+import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapreduce.JobACL;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.FailTaskAttemptRequest;
@@ -79,14 +78,14 @@ import org.apache.hadoop.yarn.webapp.Web
import org.apache.hadoop.yarn.webapp.WebApps;
/**
- * This module is responsible for talking to the
+ * This module is responsible for talking to the
* JobClient (user facing).
*
*/
public class HistoryClientService extends AbstractService {
private static final Log LOG = LogFactory.getLog(HistoryClientService.class);
-
+
private MRClientProtocol protocolHandler;
private Server server;
private WebApp webApp;
@@ -118,22 +117,22 @@ public class HistoryClientService extend
server =
rpc.getServer(MRClientProtocol.class, protocolHandler, address,
conf, null,
- conf.getInt(JHAdminConfig.MR_HISTORY_CLIENT_THREAD_COUNT,
+ conf.getInt(JHAdminConfig.MR_HISTORY_CLIENT_THREAD_COUNT,
JHAdminConfig.DEFAULT_MR_HISTORY_CLIENT_THREAD_COUNT));
-
+
// Enable service authorization?
if (conf.getBoolean(
- CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
+ CommonConfigurationKeysPublic.HADOOP_SECURITY_AUTHORIZATION,
false)) {
server.refreshServiceAcl(conf, new MRAMPolicyProvider());
}
-
+
server.start();
this.bindAddress =
NetUtils.createSocketAddr(hostNameResolved.getHostAddress()
+ ":" + server.getPort());
LOG.info("Instantiated MRClientService at " + this.bindAddress);
-
+
super.start();
}
@@ -141,7 +140,7 @@ public class HistoryClientService extend
webApp = new HsWebApp(history);
String bindAddress = conf.get(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS,
JHAdminConfig.DEFAULT_MR_HISTORY_WEBAPP_ADDRESS);
- WebApps.$for("jobhistory", this).with(conf).at(bindAddress).start(webApp);
+ WebApps.$for("jobhistory", HistoryClientService.class, this, "ws").with(conf).at(bindAddress).start(webApp);
}
@Override
@@ -158,7 +157,7 @@ public class HistoryClientService extend
private class MRClientProtocolHandler implements MRClientProtocol {
private RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
-
+
private Job verifyAndGetJob(final JobId jobID) throws YarnRemoteException {
UserGroupInformation loginUgi = null;
Job job = null;
@@ -194,7 +193,7 @@ public class HistoryClientService extend
response.setCounters(job.getCounters());
return response;
}
-
+
@Override
public GetJobReportResponse getJobReport(GetJobReportRequest request) throws YarnRemoteException {
JobId jobId = request.getJobId();
@@ -227,23 +226,23 @@ public class HistoryClientService extend
JobId jobId = request.getJobId();
int fromEventId = request.getFromEventId();
int maxEvents = request.getMaxEvents();
-
+
Job job = verifyAndGetJob(jobId);
GetTaskAttemptCompletionEventsResponse response = recordFactory.newRecordInstance(GetTaskAttemptCompletionEventsResponse.class);
response.addAllCompletionEvents(Arrays.asList(job.getTaskAttemptCompletionEvents(fromEventId, maxEvents)));
return response;
}
-
+
@Override
public KillJobResponse killJob(KillJobRequest request) throws YarnRemoteException {
throw RPCUtil.getRemoteException("Invalid operation on completed job");
}
-
+
@Override
public KillTaskResponse killTask(KillTaskRequest request) throws YarnRemoteException {
throw RPCUtil.getRemoteException("Invalid operation on completed job");
}
-
+
@Override
public KillTaskAttemptResponse killTaskAttempt(KillTaskAttemptRequest request) throws YarnRemoteException {
throw RPCUtil.getRemoteException("Invalid operation on completed job");
@@ -252,15 +251,15 @@ public class HistoryClientService extend
@Override
public GetDiagnosticsResponse getDiagnostics(GetDiagnosticsRequest request) throws YarnRemoteException {
TaskAttemptId taskAttemptId = request.getTaskAttemptId();
-
+
Job job = verifyAndGetJob(taskAttemptId.getTaskId().getJobId());
-
+
GetDiagnosticsResponse response = recordFactory.newRecordInstance(GetDiagnosticsResponse.class);
response.addAllDiagnostics(job.getTask(taskAttemptId.getTaskId()).getAttempt(taskAttemptId).getDiagnostics());
return response;
}
- @Override
+ @Override
public FailTaskAttemptResponse failTaskAttempt(FailTaskAttemptRequest request) throws YarnRemoteException {
throw RPCUtil.getRemoteException("Invalid operation on completed job");
}
@@ -269,7 +268,7 @@ public class HistoryClientService extend
public GetTaskReportsResponse getTaskReports(GetTaskReportsRequest request) throws YarnRemoteException {
JobId jobId = request.getJobId();
TaskType taskType = request.getTaskType();
-
+
GetTaskReportsResponse response = recordFactory.newRecordInstance(GetTaskReportsResponse.class);
Job job = verifyAndGetJob(jobId);
Collection<Task> tasks = job.getTasks(taskType).values();
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsAboutPage.java Tue Dec 13 23:05:56 2011
@@ -21,7 +21,7 @@ package org.apache.hadoop.mapreduce.v2.h
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.ACCORDION;
import static org.apache.hadoop.yarn.webapp.view.JQueryUI.initID;
-import org.apache.hadoop.util.VersionInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.HistoryInfo;
import org.apache.hadoop.yarn.webapp.SubView;
import org.apache.hadoop.yarn.webapp.view.InfoBlock;
@@ -45,8 +45,9 @@ public class HsAboutPage extends HsView
* @return AttemptsBlock.class
*/
@Override protected Class<? extends SubView> content() {
+ HistoryInfo info = new HistoryInfo();
info("History Server").
- _("BuildVersion", VersionInfo.getBuildVersion());
+ _("BuildVersion", info.getHadoopBuildVersion() + " on " + info.getHadoopVersionBuiltOn());
return InfoBlock.class;
}
}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobBlock.java Tue Dec 13 23:05:56 2011
@@ -34,6 +34,9 @@ import org.apache.hadoop.mapreduce.v2.ap
import org.apache.hadoop.mapreduce.v2.app.job.Job;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ConfEntryInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.AMAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.mapreduce.v2.util.MRApps.TaskAttemptStateUI;
import org.apache.hadoop.security.authorize.AccessControlList;
@@ -56,19 +59,6 @@ import static org.apache.hadoop.yarn.web
public class HsJobBlock extends HtmlBlock {
final AppContext appContext;
- int killedMapAttempts = 0;
- int failedMapAttempts = 0;
- int successfulMapAttempts = 0;
- int killedReduceAttempts = 0;
- int failedReduceAttempts = 0;
- int successfulReduceAttempts = 0;
- long avgMapTime = 0;
- long avgReduceTime = 0;
- long avgShuffleTime = 0;
- long avgSortTime = 0;
- int numMaps;
- int numReduces;
-
@Inject HsJobBlock(AppContext appctx) {
appContext = appctx;
}
@@ -85,37 +75,30 @@ public class HsJobBlock extends HtmlBloc
return;
}
JobId jobID = MRApps.toJobID(jid);
- Job job = appContext.getJob(jobID);
- if (job == null) {
+ Job j = appContext.getJob(jobID);
+ if (j == null) {
html.
p()._("Sorry, ", jid, " not found.")._();
return;
}
- Map<JobACL, AccessControlList> acls = job.getJobACLs();
- List<AMInfo> amInfos = job.getAMInfos();
- JobReport jobReport = job.getReport();
- int mapTasks = job.getTotalMaps();
- int mapTasksComplete = job.getCompletedMaps();
- int reduceTasks = job.getTotalReduces();
- int reducesTasksComplete = job.getCompletedReduces();
- long startTime = jobReport.getStartTime();
- long finishTime = jobReport.getFinishTime();
- countTasksAndAttempts(job);
+ List<AMInfo> amInfos = j.getAMInfos();
+ JobInfo job = new JobInfo(j);
ResponseInfo infoBlock = info("Job Overview").
_("Job Name:", job.getName()).
_("User Name:", job.getUserName()).
_("Queue:", job.getQueueName()).
_("State:", job.getState()).
_("Uberized:", job.isUber()).
- _("Started:", new Date(startTime)).
- _("Finished:", new Date(finishTime)).
+ _("Started:", new Date(job.getStartTime())).
+ _("Finished:", new Date(job.getFinishTime())).
_("Elapsed:", StringUtils.formatTime(
- Times.elapsed(startTime, finishTime, false)));
+ Times.elapsed(job.getStartTime(), job.getFinishTime(), false)));
String amString =
amInfos.size() == 1 ? "ApplicationMaster" : "ApplicationMasters";
- List<String> diagnostics = job.getDiagnostics();
+ // todo - switch to use JobInfo
+ List<String> diagnostics = j.getDiagnostics();
if(diagnostics != null && !diagnostics.isEmpty()) {
StringBuffer b = new StringBuffer();
for(String diag: diagnostics) {
@@ -124,18 +107,17 @@ public class HsJobBlock extends HtmlBloc
infoBlock._("Diagnostics:", b.toString());
}
- if(numMaps > 0) {
- infoBlock._("Average Map Time", StringUtils.formatTime(avgMapTime));
+ if(job.getNumMaps() > 0) {
+ infoBlock._("Average Map Time", StringUtils.formatTime(job.getAvgMapTime()));
}
- if(numReduces > 0) {
- infoBlock._("Average Reduce Time", StringUtils.formatTime(avgReduceTime));
- infoBlock._("Average Shuffle Time", StringUtils.formatTime(avgShuffleTime));
- infoBlock._("Average Merge Time", StringUtils.formatTime(avgSortTime));
+ if(job.getNumReduces() > 0) {
+ infoBlock._("Average Reduce Time", StringUtils.formatTime(job.getAvgReduceTime()));
+ infoBlock._("Average Shuffle Time", StringUtils.formatTime(job.getAvgShuffleTime()));
+ infoBlock._("Average Merge Time", StringUtils.formatTime(job.getAvgMergeTime()));
}
- for(Map.Entry<JobACL, AccessControlList> entry : acls.entrySet()) {
- infoBlock._("ACL "+entry.getKey().getAclName()+":",
- entry.getValue().getAclString());
+ for (ConfEntryInfo entry : job.getAcls()) {
+ infoBlock._("ACL "+entry.getName()+":", entry.getValue());
}
DIV<Hamlet> div = html.
_(InfoBlock.class).
@@ -154,18 +136,14 @@ public class HsJobBlock extends HtmlBloc
th(_TH, "Logs").
_();
for (AMInfo amInfo : amInfos) {
- String nodeHttpAddress = amInfo.getNodeManagerHost() +
- ":" + amInfo.getNodeManagerHttpPort();
- NodeId nodeId = BuilderUtils.newNodeId(
- amInfo.getNodeManagerHost(), amInfo.getNodeManagerPort());
-
+ AMAttemptInfo attempt = new AMAttemptInfo(amInfo,
+ job.getId(), job.getUserName(), "", "");
table.tr().
- td(String.valueOf(amInfo.getAppAttemptId().getAttemptId())).
- td(new Date(amInfo.getStartTime()).toString()).
- td().a(".nodelink", url("http://", nodeHttpAddress),
- nodeHttpAddress)._().
- td().a(".logslink", url("logs", nodeId.toString(),
- amInfo.getContainerId().toString(), jid, job.getUserName()),
+ td(String.valueOf(attempt.getAttemptId())).
+ td(new Date(attempt.getStartTime()).toString()).
+ td().a(".nodelink", url("http://", attempt.getNodeHttpAddress()),
+ attempt.getNodeHttpAddress())._().
+ td().a(".logslink", url(attempt.getShortLogsLink()),
"logs")._().
_();
}
@@ -184,13 +162,13 @@ public class HsJobBlock extends HtmlBloc
tr(_ODD).
th().
a(url("tasks", jid, "m"), "Map")._().
- td(String.valueOf(mapTasks)).
- td(String.valueOf(mapTasksComplete))._().
+ td(String.valueOf(String.valueOf(job.getMapsTotal()))).
+ td(String.valueOf(String.valueOf(job.getMapsCompleted())))._().
tr(_EVEN).
th().
a(url("tasks", jid, "r"), "Reduce")._().
- td(String.valueOf(reduceTasks)).
- td(String.valueOf(reducesTasksComplete))._()
+ td(String.valueOf(String.valueOf(job.getReducesTotal()))).
+ td(String.valueOf(String.valueOf(job.getReducesCompleted())))._()
._().
// Attempts table
@@ -204,99 +182,27 @@ public class HsJobBlock extends HtmlBloc
th("Maps").
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.FAILED.toString()),
- String.valueOf(failedMapAttempts))._().
+ String.valueOf(job.getFailedMapAttempts()))._().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.KILLED.toString()),
- String.valueOf(killedMapAttempts))._().
+ String.valueOf(job.getKilledMapAttempts()))._().
td().a(url("attempts", jid, "m",
TaskAttemptStateUI.SUCCESSFUL.toString()),
- String.valueOf(successfulMapAttempts))._().
+ String.valueOf(job.getSuccessfulMapAttempts()))._().
_().
tr(_EVEN).
th("Reduces").
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.FAILED.toString()),
- String.valueOf(failedReduceAttempts))._().
+ String.valueOf(job.getFailedReduceAttempts()))._().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.KILLED.toString()),
- String.valueOf(killedReduceAttempts))._().
+ String.valueOf(job.getKilledReduceAttempts()))._().
td().a(url("attempts", jid, "r",
TaskAttemptStateUI.SUCCESSFUL.toString()),
- String.valueOf(successfulReduceAttempts))._().
+ String.valueOf(job.getSuccessfulReduceAttempts()))._().
_().
_().
_();
}
-
- /**
- * Go through a job and update the member variables with counts for
- * information to output in the page.
- * @param job the job to get counts for.
- */
- private void countTasksAndAttempts(Job job) {
- numReduces = 0;
- numMaps = 0;
- Map<TaskId, Task> tasks = job.getTasks();
- for (Task task : tasks.values()) {
- // Attempts counts
- Map<TaskAttemptId, TaskAttempt> attempts = task.getAttempts();
- for (TaskAttempt attempt : attempts.values()) {
-
- int successful = 0, failed = 0, killed =0;
-
- if (TaskAttemptStateUI.NEW.correspondsTo(attempt.getState())) {
- //Do Nothing
- } else if (TaskAttemptStateUI.RUNNING.correspondsTo(attempt
- .getState())) {
- //Do Nothing
- } else if (TaskAttemptStateUI.SUCCESSFUL.correspondsTo(attempt
- .getState())) {
- ++successful;
- } else if (TaskAttemptStateUI.FAILED
- .correspondsTo(attempt.getState())) {
- ++failed;
- } else if (TaskAttemptStateUI.KILLED
- .correspondsTo(attempt.getState())) {
- ++killed;
- }
-
- switch (task.getType()) {
- case MAP:
- successfulMapAttempts += successful;
- failedMapAttempts += failed;
- killedMapAttempts += killed;
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- numMaps++;
- avgMapTime += (attempt.getFinishTime() -
- attempt.getLaunchTime());
- }
- break;
- case REDUCE:
- successfulReduceAttempts += successful;
- failedReduceAttempts += failed;
- killedReduceAttempts += killed;
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- numReduces++;
- avgShuffleTime += (attempt.getShuffleFinishTime() -
- attempt.getLaunchTime());
- avgSortTime += attempt.getSortFinishTime() -
- attempt.getLaunchTime();
- avgReduceTime += (attempt.getFinishTime() -
- attempt.getShuffleFinishTime());
- }
- break;
- }
- }
- }
-
- if(numMaps > 0) {
- avgMapTime = avgMapTime / numMaps;
- }
-
- if(numReduces > 0) {
- avgReduceTime = avgReduceTime / numReduces;
- avgShuffleTime = avgShuffleTime / numReduces;
- avgSortTime = avgSortTime / numReduces;
- }
- }
}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsJobsBlock.java Tue Dec 13 23:05:56 2011
@@ -21,10 +21,9 @@ package org.apache.hadoop.mapreduce.v2.h
import java.text.SimpleDateFormat;
import java.util.Date;
-import org.apache.hadoop.mapreduce.v2.api.records.JobReport;
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.job.Job;
-import org.apache.hadoop.mapreduce.v2.util.MRApps;
+import org.apache.hadoop.mapreduce.v2.hs.webapp.dao.JobInfo;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TABLE;
import org.apache.hadoop.yarn.webapp.hamlet.Hamlet.TBODY;
@@ -38,8 +37,8 @@ import com.google.inject.Inject;
*/
public class HsJobsBlock extends HtmlBlock {
final AppContext appContext;
- static final SimpleDateFormat dateFormat =
- new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
+ static final SimpleDateFormat dateFormat =
+ new SimpleDateFormat("yyyy.MM.dd HH:mm:ss z");
@Inject HsJobsBlock(AppContext appCtx) {
appContext = appCtx;
@@ -68,28 +67,21 @@ public class HsJobsBlock extends HtmlBlo
th("Reduces Completed")._()._().
tbody();
LOG.info("Getting list of all Jobs.");
- for (Job job : appContext.getAllJobs().values()) {
- String jobID = MRApps.toString(job.getID());
- JobReport report = job.getReport();
- String mapsTotal = String.valueOf(job.getTotalMaps());
- String mapsCompleted = String.valueOf(job.getCompletedMaps());
- String reduceTotal = String.valueOf(job.getTotalReduces());
- String reduceCompleted = String.valueOf(job.getCompletedReduces());
- long startTime = report.getStartTime();
- long finishTime = report.getFinishTime();
+ for (Job j : appContext.getAllJobs().values()) {
+ JobInfo job = new JobInfo(j);
tbody.
tr().
- td(dateFormat.format(new Date(startTime))).
- td(dateFormat.format(new Date(finishTime))).
- td().a(url("job", jobID), jobID)._().
- td(job.getName().toString()).
+ td(dateFormat.format(new Date(job.getStartTime()))).
+ td(dateFormat.format(new Date(job.getFinishTime()))).
+ td().a(url("job", job.getId()), job.getId())._().
+ td(job.getName()).
td(job.getUserName()).
td(job.getQueueName()).
- td(job.getState().toString()).
- td(mapsTotal).
- td(mapsCompleted).
- td(reduceTotal).
- td(reduceCompleted)._();
+ td(job.getState()).
+ td(String.valueOf(job.getMapsTotal())).
+ td(String.valueOf(job.getMapsCompleted())).
+ td(String.valueOf(job.getReducesTotal())).
+ td(String.valueOf(job.getReducesCompleted()))._();
}
tbody._().
tfoot().
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsTasksBlock.java Tue Dec 13 23:05:56 2011
@@ -20,12 +20,13 @@ package org.apache.hadoop.mapreduce.v2.h
import static org.apache.hadoop.mapreduce.v2.app.webapp.AMParams.TASK_TYPE;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskAttemptState;
-import org.apache.hadoop.mapreduce.v2.api.records.TaskReport;
import org.apache.hadoop.mapreduce.v2.api.records.TaskType;
import org.apache.hadoop.mapreduce.v2.app.job.Task;
import org.apache.hadoop.mapreduce.v2.app.job.TaskAttempt;
import org.apache.hadoop.mapreduce.v2.app.webapp.App;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.ReduceTaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskAttemptInfo;
+import org.apache.hadoop.mapreduce.v2.app.webapp.dao.TaskInfo;
import org.apache.hadoop.mapreduce.v2.util.MRApps;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.yarn.util.Times;
@@ -65,7 +66,7 @@ public class HsTasksBlock extends HtmlBl
if (!symbol.isEmpty()) {
type = MRApps.taskType(symbol);
}
-
+
THEAD<TABLE<Hamlet>> thead = html.table("#tasks").thead();
//Create the spanning row
int attemptColSpan = type == TaskType.REDUCE ? 8 : 3;
@@ -74,7 +75,7 @@ public class HsTasksBlock extends HtmlBl
th().$colspan(attemptColSpan).$class("ui-state-default").
_("Successful Attempt")._().
_();
-
+
TR<THEAD<TABLE<Hamlet>>> theadRow = thead.
tr().
th("Name").
@@ -83,33 +84,33 @@ public class HsTasksBlock extends HtmlBl
th("Finish Time").
th("Elapsed Time").
th("Start Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Shuffle Finish Time"); //Attempt
theadRow.th("Merge Finish Time"); //Attempt
}
-
+
theadRow.th("Finish Time"); //Attempt
-
+
if(type == TaskType.REDUCE) {
theadRow.th("Elapsed Time Shuffle"); //Attempt
theadRow.th("Elapsed Time Merge"); //Attempt
theadRow.th("Elapsed Time Reduce"); //Attempt
}
theadRow.th("Elapsed Time"); //Attempt
-
+
TBODY<TABLE<Hamlet>> tbody = theadRow._()._().tbody();
for (Task task : app.getJob().getTasks().values()) {
if (type != null && task.getType() != type) {
continue;
}
- String tid = MRApps.toString(task.getID());
-
- TaskReport report = task.getReport();
- long startTime = report.getStartTime();
- long finishTime = report.getFinishTime();
- long elapsed = Times.elapsed(startTime, finishTime, false);
-
+ TaskInfo info = new TaskInfo(task);
+ String tid = info.getId();
+
+ long startTime = info.getStartTime();
+ long finishTime = info.getFinishTime();
+ long elapsed = info.getElapsedTime();
+
long attemptStartTime = -1;
long shuffleFinishTime = -1;
long sortFinishTime = -1;
@@ -118,30 +119,31 @@ public class HsTasksBlock extends HtmlBl
long elapsedSortTime = -1;;
long elapsedReduceTime = -1;
long attemptElapsed = -1;
- TaskAttempt successful = getSuccessfulAttempt(task);
+ TaskAttempt successful = info.getSuccessful();
if(successful != null) {
- attemptStartTime = successful.getLaunchTime();
- attemptFinishTime = successful.getFinishTime();
+ TaskAttemptInfo ta;
if(type == TaskType.REDUCE) {
- shuffleFinishTime = successful.getShuffleFinishTime();
- sortFinishTime = successful.getSortFinishTime();
- elapsedShuffleTime =
- Times.elapsed(attemptStartTime, shuffleFinishTime, false);
- elapsedSortTime =
- Times.elapsed(shuffleFinishTime, sortFinishTime, false);
- elapsedReduceTime =
- Times.elapsed(sortFinishTime, attemptFinishTime, false);
+ ReduceTaskAttemptInfo rta = new ReduceTaskAttemptInfo(successful, type);
+ shuffleFinishTime = rta.getShuffleFinishTime();
+ sortFinishTime = rta.getMergeFinishTime();
+ elapsedShuffleTime = rta.getElapsedShuffleTime();
+ elapsedSortTime = rta.getElapsedMergeTime();
+ elapsedReduceTime = rta.getElapsedReduceTime();
+ ta = rta;
+ } else {
+ ta = new TaskAttemptInfo(successful, type, false);
}
- attemptElapsed =
- Times.elapsed(attemptStartTime, attemptFinishTime, false);
+ attemptStartTime = ta.getStartTime();
+ attemptFinishTime = ta.getFinishTime();
+ attemptElapsed = ta.getElapsedTime();
}
-
+
TR<TBODY<TABLE<Hamlet>>> row = tbody.tr();
row.
td().
- br().$title(String.valueOf(task.getID().getId()))._(). // sorting
+ br().$title(String.valueOf(info.getTaskNum()))._(). // sorting
a(url("task", tid), tid)._().
- td(report.getTaskState().toString()).
+ td(info.getState()).
td().
br().$title(String.valueOf(startTime))._().
_(Times.format(startTime))._().
@@ -166,7 +168,7 @@ public class HsTasksBlock extends HtmlBl
td().
br().$title(String.valueOf(attemptFinishTime))._().
_(Times.format(attemptFinishTime))._();
-
+
if(type == TaskType.REDUCE) {
row.td().
br().$title(String.valueOf(elapsedShuffleTime))._().
@@ -178,7 +180,7 @@ public class HsTasksBlock extends HtmlBl
br().$title(String.valueOf(elapsedReduceTime))._().
_(formatTime(elapsedReduceTime))._();
}
-
+
row.td().
br().$title(String.valueOf(attemptElapsed))._().
_(formatTime(attemptElapsed))._();
@@ -194,7 +196,7 @@ public class HsTasksBlock extends HtmlBl
.$type(InputType.text).$name("elapsed_time").$value("Elapsed Time")._()
._().th().input("search_init").$type(InputType.text)
.$name("attempt_start_time").$value("Start Time")._()._();
-
+
if(type == TaskType.REDUCE) {
footRow.th().input("search_init").$type(InputType.text)
.$name("shuffle_time").$value("Shuffle Time")._()._();
@@ -216,20 +218,12 @@ public class HsTasksBlock extends HtmlBl
footRow.th().input("search_init").$type(InputType.text)
.$name("attempt_elapsed").$value("Elapsed Time")._()._();
-
+
footRow._()._()._();
}
private String formatTime(long elapsed) {
return elapsed < 0 ? "N/A" : StringUtils.formatTime(elapsed);
}
-
- private TaskAttempt getSuccessfulAttempt(Task task) {
- for(TaskAttempt attempt: task.getAttempts().values()) {
- if(attempt.getState() == TaskAttemptState.SUCCEEDED) {
- return attempt;
- }
- }
- return null;
- }
+
}
Modified: hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java
URL: http://svn.apache.org/viewvc/hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java?rev=1213975&r1=1213974&r2=1213975&view=diff
==============================================================================
--- hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java (original)
+++ hadoop/common/trunk/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-hs/src/main/java/org/apache/hadoop/mapreduce/v2/hs/webapp/HsWebApp.java Tue Dec 13 23:05:56 2011
@@ -27,6 +27,7 @@ import static org.apache.hadoop.yarn.web
import org.apache.hadoop.mapreduce.v2.app.AppContext;
import org.apache.hadoop.mapreduce.v2.app.webapp.AMParams;
import org.apache.hadoop.mapreduce.v2.hs.HistoryContext;
+import org.apache.hadoop.yarn.webapp.GenericExceptionHandler;
import org.apache.hadoop.yarn.webapp.WebApp;
public class HsWebApp extends WebApp implements AMParams {
@@ -39,6 +40,9 @@ public class HsWebApp extends WebApp imp
@Override
public void setup() {
+ bind(HsWebServices.class);
+ bind(JAXBContextResolver.class);
+ bind(GenericExceptionHandler.class);
bind(AppContext.class).toInstance(history);
route("/", HsController.class);
route("/app", HsController.class);