Posted to common-commits@hadoop.apache.org by om...@apache.org on 2011/03/04 05:32:46 UTC

svn commit: r1077590 [1/2] - in /hadoop/common/branches/branch-0.20-security-patches: ./ bin/ src/core/org/apache/hadoop/http/ src/mapred/org/apache/hadoop/mapred/ src/test/org/apache/hadoop/mapred/ src/webapps/history/ src/webapps/job/

Author: omalley
Date: Fri Mar  4 04:32:45 2011
New Revision: 1077590

URL: http://svn.apache.org/viewvc?rev=1077590&view=rev
Log:
commit cadb769b4fb37e0fabf3947f9bcf18e4c7bd0516
Author: Devaraj Das <dd...@yahoo-inc.com>
Date:   Fri Jul 23 10:46:46 2010 -0700

    MAPREDUCE:291 from
    
    +++ b/YAHOO-CHANGES.txt
    +    MAPREDUCE-291. Optionally a separate daemon should serve JobHistory.
    +    (Srikanth Sundarrajan via ddas)
    +
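
    For reference, a minimal sketch of bringing the server up in standalone
    mode through the API this commit adds (the bind address is only an
    example value; the property names match the constants defined in
    JobHistoryServer below):

        JobConf conf = new JobConf();
        // Address the history web UI binds to; "historyhost:19888" is
        // illustrative.
        conf.set("mapreduce.history.server.http.address", "historyhost:19888");
        // false = run outside the JobTracker; the default (true) embeds it.
        conf.setBoolean("mapreduce.history.server.embedded", false);

        JobHistoryServer server = new JobHistoryServer(conf);
        server.start();  // starts the Jetty container serving /history
        server.join();   // block until the web server stops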

Added:
    hadoop/common/branches/branch-0.20-security-patches/bin/start-jobhistoryserver.sh
    hadoop/common/branches/branch-0.20-security-patches/bin/stop-jobhistoryserver.sh
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistoryServer.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistoryServer.java
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/analysejobhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/job_authorization_error.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobconf_history.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobdetailshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobhistoryhome.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobtaskshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/legacyjobhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/loadhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/taskdetailshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/taskstatshistory.jsp
Removed:
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/analysejobhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobconf_history.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobdetailshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobtaskshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/legacyjobhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/loadhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/taskdetailshistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/taskstatshistory.jsp
Modified:
    hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
    hadoop/common/branches/branch-0.20-security-patches/build.xml
    hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JSPUtil.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java
    hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java
    hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestWebUIAuthorization.java
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobdetails.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobhistory.jsp
    hadoop/common/branches/branch-0.20-security-patches/src/webapps/job/jobtracker.jsp

Modified: hadoop/common/branches/branch-0.20-security-patches/bin/hadoop
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/hadoop?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/hadoop (original)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/hadoop Fri Mar  4 04:32:45 2011
@@ -74,6 +74,7 @@ if [ $# = 0 ]; then
   echo "  jobtracker           run the MapReduce job Tracker node" 
   echo "  pipes                run a Pipes job"
   echo "  tasktracker          run a MapReduce task Tracker node" 
+  echo "  historyserver        run job history servers as a standalone daemon"
   echo "  job                  manipulate MapReduce jobs"
   echo "  queue                get information regarding JobQueues" 
   echo "  version              print the version"
@@ -244,6 +245,9 @@ elif [ "$COMMAND" = "fetchdt" ] ; then
 elif [ "$COMMAND" = "jobtracker" ] ; then
   CLASS=org.apache.hadoop.mapred.JobTracker
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOBTRACKER_OPTS"
+elif [ "$COMMAND" = "historyserver" ] ; then
+  CLASS=org.apache.hadoop.mapred.JobHistoryServer
+  HADOOP_OPTS="$HADOOP_OPTS $HADOOP_JOB_HISTORYSERVER_OPTS"
 elif [ "$COMMAND" = "tasktracker" ] ; then
   CLASS=org.apache.hadoop.mapred.TaskTracker
   HADOOP_OPTS="$HADOOP_OPTS $HADOOP_TASKTRACKER_OPTS"

Added: hadoop/common/branches/branch-0.20-security-patches/bin/start-jobhistoryserver.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/start-jobhistoryserver.sh?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/start-jobhistoryserver.sh (added)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/start-jobhistoryserver.sh Fri Mar  4 04:32:45 2011
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Start the Hadoop job history daemon.  Run this on the node where the history server is to run
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hadoop-config.sh
+
+# start daemon
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR start historyserver

Added: hadoop/common/branches/branch-0.20-security-patches/bin/stop-jobhistoryserver.sh
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/bin/stop-jobhistoryserver.sh?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/bin/stop-jobhistoryserver.sh (added)
+++ hadoop/common/branches/branch-0.20-security-patches/bin/stop-jobhistoryserver.sh Fri Mar  4 04:32:45 2011
@@ -0,0 +1,27 @@
+#!/usr/bin/env bash
+
+# Licensed to the Apache Software Foundation (ASF) under one or more
+# contributor license agreements.  See the NOTICE file distributed with
+# this work for additional information regarding copyright ownership.
+# The ASF licenses this file to You under the Apache License, Version 2.0
+# (the "License"); you may not use this file except in compliance with
+# the License.  You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+# Stop the Hadoop job history daemon.  Run this on the node where the history server is running
+
+bin=`dirname "$0"`
+bin=`cd "$bin"; pwd`
+
+. "$bin"/hadoop-config.sh
+
+"$bin"/hadoop-daemon.sh --config $HADOOP_CONF_DIR stop historyserver
+

Modified: hadoop/common/branches/branch-0.20-security-patches/build.xml
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/build.xml?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/build.xml (original)
+++ hadoop/common/branches/branch-0.20-security-patches/build.xml Fri Mar  4 04:32:45 2011
@@ -325,6 +325,7 @@
     <mkdir dir="${build.src}"/>
     <mkdir dir="${build.webapps}/task/WEB-INF"/>
     <mkdir dir="${build.webapps}/job/WEB-INF"/>
+    <mkdir dir="${build.webapps}/history/WEB-INF"/>
     <mkdir dir="${build.webapps}/hdfs/WEB-INF"/>
     <mkdir dir="${build.webapps}/datanode/WEB-INF"/>
     <mkdir dir="${build.webapps}/secondary/WEB-INF"/>
@@ -442,6 +443,13 @@
      webxml="${build.webapps}/job/WEB-INF/web.xml">
     </jsp-compile>
 
+    <jsp-compile
+     uriroot="${src.webapps}/history"
+     outputdir="${build.src}"
+     package="org.apache.hadoop.mapred"
+     webxml="${build.webapps}/history/WEB-INF/web.xml">
+    </jsp-compile>
+
     <!-- Compile Java files (excluding JSPs) checking warnings -->
     <javac 
      encoding="${build.encoding}" 

Modified: hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/core/org/apache/hadoop/http/HttpServer.java Fri Mar  4 04:32:45 2011
@@ -267,6 +267,36 @@ public class HttpServer implements Filte
     defaultContexts.put(ctxt, isFiltered);
   }
 
+  public WebAppContext addContext(String name, boolean isFiltered)
+      throws IOException {
+    if (0 == webServer.getHandlers().length) {
+      throw new RuntimeException("Couldn't find handler");
+    }
+    WebAppContext webAppCtx = new WebAppContext();
+    webAppCtx.setDisplayName("AppContext-" + name);
+    webAppCtx.setContextPath("/" + name);
+    webAppCtx.setWar(getWebAppsPath() + "/" + name);
+    setContextAttributes(webAppCtx);
+    addContext(webAppCtx, isFiltered);
+
+    if (isFiltered) {
+      defineFilter(webAppCtx, "krb5Filter",
+          Krb5AndCertsSslSocketConnector.Krb5SslFilter.class.getName(),
+          null, null);
+
+      defineFilter(webAppCtx, "safety", QuotingInputFilter.class.getName(), null,
+          new String[] {"/*"});
+
+      final FilterInitializer[] initializers = getFilterInitializers(conf);
+      if (initializers != null) {
+        for(FilterInitializer c : initializers) {
+          c.initFilter(this, conf);
+        }
+      }
+    }
+    return webAppCtx;
+  }
+
   /**
    * Add a context 
    * @param pathSpec The path spec for the context
@@ -291,7 +321,18 @@ public class HttpServer implements Filte
    * @param value The value of the attribute
    */
   public void setAttribute(String name, Object value) {
-    webAppContext.setAttribute(name, value);
+    setAttribute(webAppContext, name, value);
+  }
+
+  /**
+   * Set a value in the webapp context. These values are available to the jsp
+   * pages as "application.getAttribute(name)".
+   * @param context Context to add attribute
+   * @param name The name of the attribute
+   * @param value The value of the attribute
+   */
+  public void setAttribute(Context context, String name, Object value) {
+    context.setAttribute(name, value);
   }
 
   /**
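
A sketch of how a caller can drive the new context plumbing, given an
HttpServer instance httpServer (the method names are the ones added above;
the attribute key and path are illustrative, mirroring what
JobHistoryServer does later in this patch):

    // Mount webapps/history at /history on the shared Jetty instance.
    // isFiltered=true also wires up krb5Filter, the QuotingInputFilter
    // and any configured FilterInitializers.
    WebAppContext historyCtx = httpServer.addContext("history", true);

    // Values set on a specific context surface in its JSPs via
    // application.getAttribute(name).
    httpServer.setAttribute(historyCtx, "historyLogDir",
        "/mapred/history/done");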

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JSPUtil.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JSPUtil.java?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JSPUtil.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JSPUtil.java Fri Mar  4 04:32:45 2011
@@ -402,7 +402,8 @@ class JSPUtil {
             "<td id=\"job_" + rowId + "\">" + 
             
               (historyFileUrl == null ? "" :
-              "<a href=\"jobdetailshistory.jsp?logFile=" + historyFileUrl + "\">") + 
+              "<a href=\"" + JobHistoryServer.getHistoryUrlPrefix(tracker.conf) +
+                  "/jobdetailshistory.jsp?logFile=" + historyFileUrl + "\">") +
               
               info.status.getJobId() + "</a></td>" +
             
@@ -446,12 +447,11 @@ class JSPUtil {
    * 
    * @param logFile
    * @param fs
-   * @param jobTracker
    * @return JobInfo
    * @throws IOException
    */
   static JobInfo getJobInfo(Path logFile, FileSystem fs,
-      JobTracker jobTracker, String user) throws IOException {
+      JobConf jobConf, ACLsManager acLsManager, String user) throws IOException {
     String jobid = getJobID(logFile.getName());
     JobInfo jobInfo = null;
     synchronized(jobHistoryCache) {
@@ -465,7 +465,7 @@ class JSPUtil {
       }
       jobHistoryCache.put(jobid, jobInfo);
       int CACHE_SIZE = 
-        jobTracker.conf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
+        jobConf.getInt("mapred.job.tracker.jobhistory.lru.cache.size", 5);
       if (jobHistoryCache.size() > CACHE_SIZE) {
         Iterator<Map.Entry<String, JobInfo>> it = 
           jobHistoryCache.entrySet().iterator();
@@ -483,7 +483,7 @@ class JSPUtil {
     }
 
     // Authorize the user for view access of this job
-    jobTracker.getACLsManager().checkAccess(jobid, currentUser,
+    acLsManager.checkAccess(jobid, currentUser,
         jobInfo.getJobQueue(), Operation.VIEW_JOB_DETAILS,
         jobInfo.get(Keys.USER), jobInfo.getJobACLs().get(JobACL.VIEW_JOB));
 
@@ -495,7 +495,6 @@ class JSPUtil {
    * 
    * @param request
    * @param response
-   * @param jobTracker
    * @param fs
    * @param logFile
    * @return the job if authorization is disabled or if the authorization checks
@@ -505,29 +504,32 @@ class JSPUtil {
    * @throws ServletException
    */
   static JobInfo checkAccessAndGetJobInfo(HttpServletRequest request,
-      HttpServletResponse response, final JobTracker jobTracker,
-      final FileSystem fs, final Path logFile) throws IOException,
+      HttpServletResponse response, final JobConf jobConf,
+      final ACLsManager acLsManager, final FileSystem fs,
+      final Path logFile) throws IOException,
       InterruptedException, ServletException {
     String jobid = getJobID(logFile.getName());
     String user = request.getRemoteUser();
     JobInfo job = null;
     if (user != null) {
       try {
-        job = JSPUtil.getJobInfo(logFile, fs, jobTracker, user);
+        job = JSPUtil.getJobInfo(logFile, fs, jobConf, acLsManager, user);
       } catch (AccessControlException e) {
+        String trackerAddress = jobConf.get("mapred.job.tracker.http.address");
         String errMsg =
             String.format(
                 "User %s failed to view %s!<br><br>%s"
                     + "<hr>"
                     + "<a href=\"jobhistory.jsp\">Go back to JobHistory</a><br>"
-                    + "<a href=\"jobtracker.jsp\">Go back to JobTracker</a>",
+                    + "<a href=\"http://" + trackerAddress +
+                    "/jobtracker.jsp\">Go back to JobTracker</a>",
                 user, jobid, e.getMessage());
         JSPUtil.setErrorAndForward(errMsg, request, response);
         return null;
       }
     } else {
       // no authorization needed
-      job = JSPUtil.getJobInfo(logFile, fs, jobTracker, null);
+      job = JSPUtil.getJobInfo(logFile, fs, jobConf, acLsManager, null);
     }
     return job;
   }
@@ -555,17 +557,19 @@ class JSPUtil {
       Map<JobACL, AccessControlList> jobAcls, JspWriter out)
       throws IOException {
     if (tracker.areACLsEnabled()) {
-      // Display job-view-acls and job-modify-acls configured for this job
-      out.print("<b>Job-ACLs:</b><br>");
-      for (JobACL aclName : JobACL.values()) {
-        String aclConfigName = aclName.getAclName();
-        AccessControlList aclConfigured = jobAcls.get(aclName);
-        if (aclConfigured != null) {
-          String aclStr = aclConfigured.toString();
-          out.print("&nbsp;&nbsp;&nbsp;&nbsp;" + aclConfigName + ": "
-              + aclStr + "<br>");
-        }
-      }
+      printJobACLsInternal(jobAcls, out);
+    }
+    else {
+      out.print("<b>Job-ACLs: " + new AccessControlList("*").toString()
+          + "</b><br>");
+    }
+  }
+
+  static void printJobACLs(JobConf conf,
+      Map<JobACL, AccessControlList> jobAcls, JspWriter out)
+      throws IOException {
+    if (conf.getBoolean(JobConf.MR_ACLS_ENABLED, false)) {
+      printJobACLsInternal(jobAcls, out);
     }
     else {
       out.print("<b>Job-ACLs: " + new AccessControlList("*").toString()
@@ -573,6 +577,21 @@ class JSPUtil {
     }
   }
 
+  private static void printJobACLsInternal(Map<JobACL, AccessControlList> jobAcls,
+                                           JspWriter out) throws IOException {
+    // Display job-view-acls and job-modify-acls configured for this job
+    out.print("<b>Job-ACLs:</b><br>");
+    for (JobACL aclName : JobACL.values()) {
+      String aclConfigName = aclName.getAclName();
+      AccessControlList aclConfigured = jobAcls.get(aclName);
+      if (aclConfigured != null) {
+        String aclStr = aclConfigured.toString();
+        out.print("&nbsp;&nbsp;&nbsp;&nbsp;" + aclConfigName + ": "
+            + aclStr + "<br>");
+      }
+    }
+  }
+
   static boolean privateActionsAllowed(JobConf conf) {
     return conf.getBoolean(PRIVATE_ACTIONS_KEY, false);
   }
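
The signature changes decouple the history pages from a live JobTracker:
what used to be pulled off the JobTracker now arrives as a (JobConf,
ACLsManager, FileSystem) triple. A sketch of the call as the history JSPs
added in this commit make it:

    // Inside a history JSP; the attributes are published by
    // JobHistoryServer.initializeWebServer().
    FileSystem fs = (FileSystem) application.getAttribute("fileSys");
    JobConf jobConf = (JobConf) application.getAttribute("jobConf");
    ACLsManager aclsManager =
        (ACLsManager) application.getAttribute("aclManager");

    JobHistory.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(
        request, response, jobConf, aclsManager, fs, new Path(logFile));
    if (job == null) {
      return;  // not authorized; an error page has already been forwarded
    }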

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistory.java Fri Mar  4 04:32:45 2011
@@ -518,6 +518,12 @@ public class JobHistory {
   }
 
   static void initDone(JobConf conf, FileSystem fs) throws IOException {
+    initDone(conf, fs, true);
+  }
+
+  static void initDone(JobConf conf, FileSystem fs,
+                                     boolean setup)
+      throws IOException {
     //if completed job history location is set, use that
     String doneLocation = conf.
                      get("mapred.job.tracker.history.completed.location");
@@ -529,6 +535,10 @@ public class JobHistory {
       DONEDIR_FS = LOGDIR_FS;
     }
 
+    if (!setup) {
+        return;
+    }
+
     //If not already present create the done folder with appropriate 
     //permission
     if (!DONEDIR_FS.exists(DONE)) {
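
The new three-argument overload lets a standalone history server resolve
the completed-history location without re-creating the done directory,
which remains the JobTracker's responsibility. As used by
JobHistoryServer.initializeWebServer() below:

    // setup=false: compute DONE and DONEDIR_FS from the conf, then
    // return before the directory creation and permission logic runs.
    JobHistory.initDone(conf, fs, false);
    String historyLogDir =
        JobHistory.getCompletedJobHistoryLocation().toString();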

Added: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistoryServer.java?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistoryServer.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobHistoryServer.java Fri Mar  4 04:32:45 2011
@@ -0,0 +1,257 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.http.HttpServer;
+import org.apache.hadoop.net.NetUtils;
+import org.apache.hadoop.security.SecurityUtil;
+import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.util.StringUtils;
+import org.mortbay.jetty.webapp.WebAppContext;
+
+import java.io.IOException;
+import java.net.InetSocketAddress;
+import java.net.URL;
+import java.security.PrivilegedExceptionAction;
+
+/******************************************************************
+ * {@link JobHistoryServer} is responsible for servicing all job history
+ * related requests from clients.
+ *
+ * The history server can be initialized in one of two modes:
+ *   * Embedded within the {@link JobTracker}
+ *   * As an external daemon, run either on the job tracker node or elsewhere
+ *
+ * Two {@link Configuration} entries in mapred-site.xml govern the behaviour
+ * of the history server:
+ *
+ * mapreduce.history.server.http.address is the address to which the history
+ *  web server is bound. When operating in embedded mode, the hostname in
+ *  the history address has to be the same as the job tracker host name.
+ *
+ * mapreduce.history.server.embedded (default true) causes the job tracker
+ *  to initialize the history server; otherwise the server needs to be
+ *  started as a separate daemon process.
+ *****************************************************************/
+public class JobHistoryServer {
+  private static final Log LOG = LogFactory.getLog(JobHistoryServer.class);
+
+  static{
+    Configuration.addDefaultResource("mapred-default.xml");
+    Configuration.addDefaultResource("mapred-site.xml");
+  }
+
+  private static final String JH_USER_NAME =
+      "mapreduce.jobhistory.kerberos.principal";
+  private static final String JH_KEYTAB_FILE =
+      "mapreduce.jobhistory.keytab.file";
+  public static final String MAPRED_HISTORY_SERVER_HTTP_ADDRESS =
+      "mapreduce.history.server.http.address";
+  public static final String MAPRED_HISTORY_SERVER_EMBEDDED =
+      "mapreduce.history.server.embedded";
+
+  private HttpServer historyServer;
+  private JobConf conf;
+  private String historyInfoAddr;
+  private WebAppContext context;
+
+  /**
+   * Starts the job history server as an independent process
+   *  * Initializes ACL Manager
+   *  * Starts a webapp to service history requests
+   *
+   * @param conf - MR cluster configuration
+   * @throws IOException - any exception starting history server
+   */
+  public JobHistoryServer(JobConf conf) throws IOException {
+
+    if (isEmbedded(conf)) {
+      throw new IllegalStateException("History server is configured to run " +
+          "within JobTracker. Aborting..");
+    }
+
+    historyInfoAddr = getBindAddress(conf);
+    login(conf);
+    ACLsManager aclsManager = initializeACLsManager(conf);
+    historyServer = initializeWebContainer(conf, aclsManager);
+    initializeWebServer(conf, aclsManager);
+  }
+
+  /**
+   * Starts the job history server as an embedded server within the job tracker
+   *  * Starts a webapp to service history requests
+   *
+   * @param conf - MR Cluster configuration
+   * @param aclsManager - ACLs Manager for user authentication
+   * @param httpServer - Http Server instance
+   * @throws IOException - any exception starting history server
+   */
+  public JobHistoryServer(JobConf conf,
+                          ACLsManager aclsManager,
+                          HttpServer httpServer) throws IOException {
+    historyInfoAddr = getBindAddress(conf);
+    this.historyServer = httpServer;
+    initializeWebServer(conf, aclsManager);
+  }
+
+  private void login(JobConf conf) throws IOException {
+    UserGroupInformation.setConfiguration(conf);
+    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(historyInfoAddr);
+
+    SecurityUtil.login(conf, JH_KEYTAB_FILE, JH_USER_NAME, infoSocAddr.getHostName());
+    LOG.info("History server login successful");
+  }
+
+  private ACLsManager initializeACLsManager(JobConf conf)
+      throws IOException {
+    LOG.info("Initializing ACLs Manager");
+
+    Configuration queuesConf = new Configuration(conf);
+    QueueManager queueManager = new QueueManager(queuesConf);
+
+    return new ACLsManager(conf,
+        new JobACLsManager(conf), queueManager);
+  }
+
+  /**
+   * Start embedded jetty server to host history servlets/pages
+   *  - Push history file system, acl Manager and cluster conf for future
+   *    reference by the servlets/pages
+   *
+   * @param conf - Cluster configuration
+   * @param aclsManager - ACLs Manager for validating user request
+   * @throws IOException - Any exception while starting web server
+   */
+  private void initializeWebServer(final JobConf conf,
+                                            ACLsManager aclsManager)
+      throws IOException {
+
+    this.conf = conf;
+
+    FileSystem fs;
+    try {
+      fs = aclsManager.getMROwner().
+        doAs(new PrivilegedExceptionAction<FileSystem>() {
+        public FileSystem run() throws IOException {
+          return FileSystem.get(conf);
+      }});
+    } catch (InterruptedException e) {
+      throw new IOException("Operation interrupted", e);
+    }
+
+    if (!isEmbedded(conf)) {
+      JobHistory.initDone(conf, fs, false);
+    }
+    final String historyLogDir =
+      JobHistory.getCompletedJobHistoryLocation().toString();
+
+    context = historyServer.addContext("history", true);
+
+    historyServer.setAttribute(context, "historyLogDir", historyLogDir);
+    historyServer.setAttribute(context, "fileSys", fs);
+    historyServer.setAttribute(context, "jobConf", conf);
+    historyServer.setAttribute(context, "aclManager", aclsManager);
+
+    if (!isEmbedded(conf)) {
+      historyServer.setAttribute("historyLogDir", historyLogDir);
+      historyServer.setAttribute("fileSys", fs);
+      historyServer.setAttribute("jobConf", conf);
+      historyServer.setAttribute("aclManager", aclsManager);
+    }
+  }
+
+  private HttpServer initializeWebContainer(JobConf conf,
+                                      ACLsManager aclsManager)
+      throws IOException {
+    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(historyInfoAddr);
+    int tmpInfoPort = infoSocAddr.getPort();
+    return new HttpServer("/", infoSocAddr.getHostName(),
+        tmpInfoPort, tmpInfoPort == 0, conf, aclsManager.getAdminsAcl());
+  }
+
+  public void start() throws IOException {
+    if (!isEmbedded(conf)) {
+      historyServer.start();
+    } else {
+      try {
+        context.start();
+      } catch (Exception e) {
+        throw new IOException("Unable to start history context", e);
+      }
+    }
+
+    InetSocketAddress infoSocAddr = NetUtils.createSocketAddr(historyInfoAddr);
+    conf.set(MAPRED_HISTORY_SERVER_HTTP_ADDRESS, infoSocAddr.getHostName() +
+        ":" + historyServer.getPort());
+    LOG.info("Started job history server at: " + getAddress(conf));
+  }
+
+  public void join() throws InterruptedException {
+    historyServer.join();
+  }
+
+  /**
+   * Shuts down the history server if already initialized
+   * @throws Exception - Any exception during shutdown
+   */
+  public void shutdown() throws Exception {
+    if (historyServer != null && !isEmbedded(conf)) {
+      LOG.info("Shutting down history server");
+      historyServer.stop();
+    }
+  }
+
+  /**
+   * Start job history server as an independent process
+   *
+   * @param args - Command line arguments
+   */
+  public static void main(String[] args) {
+    StringUtils.startupShutdownMessage(JobHistoryServer.class, args, LOG);
+
+    try {
+      JobHistoryServer server = new JobHistoryServer(new JobConf());
+      server.start();
+      server.join();
+    } catch (Throwable e) {
+      LOG.fatal(StringUtils.stringifyException(e));
+      System.exit(-1);
+    }
+  }
+
+  static boolean isEmbedded(JobConf conf) {
+    return conf.getBoolean(MAPRED_HISTORY_SERVER_EMBEDDED, true);
+  }
+
+  static String getAddress(JobConf conf) {
+    return conf.get(MAPRED_HISTORY_SERVER_HTTP_ADDRESS);
+  }
+
+  static String getHistoryUrlPrefix(JobConf conf) {
+    return "http://" + getAddress(conf) + "/history";
+  }
+
+  private static String getBindAddress(JobConf conf) {
+    return conf.get(MAPRED_HISTORY_SERVER_HTTP_ADDRESS, "localhost:0");
+  }
+}
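
Other components locate the (possibly remote) history UI through
getHistoryUrlPrefix(); start() records the actual bound address back into
the conf, so the prefix is valid in both modes. A sketch of the link
rewriting this enables (historyFileUrl stands in for the encoded history
file name, as in JSPUtil):

    // Yields "http://<host>:<port>/history" in embedded and
    // standalone modes alike.
    String prefix = JobHistoryServer.getHistoryUrlPrefix(conf);
    String link = prefix + "/jobdetailshistory.jsp?logFile="
        + historyFileUrl;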

Modified: hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/mapred/org/apache/hadoop/mapred/JobTracker.java Fri Mar  4 04:32:45 2011
@@ -1939,6 +1939,7 @@ public class JobTracker implements MRCon
   CompletedJobStatusStore completedJobStatusStore = null;
   Thread completedJobsStoreThread = null;
   RecoveryManager recoveryManager;
+  JobHistoryServer jobHistoryServer;
 
   /**
    * It might seem like a bug to maintain a TreeSet of tasktracker objects,
@@ -2267,6 +2268,15 @@ public class JobTracker implements MRCon
       }
     });
     infoServer.setAttribute("fileSys", historyFS);
+    infoServer.setAttribute("jobConf", conf);
+    infoServer.setAttribute("aclManager", aclsManager);
+
+    if (JobHistoryServer.isEmbedded(conf)) {
+      LOG.info("History server being initialized in embedded mode");
+      jobHistoryServer = new JobHistoryServer(conf, aclsManager, infoServer);
+      jobHistoryServer.start();
+      LOG.info("Job History Server web address: " + JobHistoryServer.getAddress(conf));
+    }
 
     this.dnsToSwitchMapping = ReflectionUtils.newInstance(
         conf.getClass("topology.node.switch.mapping.impl", ScriptBasedMapping.class,
@@ -2470,6 +2480,14 @@ public class JobTracker implements MRCon
         ex.printStackTrace();
       }
     }
+    if (jobHistoryServer != null) {
+      LOG.info("Stopping job history server");
+      try {
+        jobHistoryServer.shutdown();
+      } catch (Exception ex) {
+        LOG.warn("Exception shutting down Job History server", ex);
+      }
+    }
     DelegationTokenRenewal.close();
     LOG.info("stopped all jobtracker services");
     return;
@@ -2533,7 +2551,7 @@ public class JobTracker implements MRCon
       trackerToMarkedTasksMap.put(taskTracker, taskset);
     }
     taskset.add(taskid);
-      
+
     if (LOG.isDebugEnabled()) {
       LOG.debug("Marked '" + taskid + "' from '" + taskTracker + "'");
     }

Added: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistoryServer.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistoryServer.java?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistoryServer.java (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestJobHistoryServer.java Fri Mar  4 04:32:45 2011
@@ -0,0 +1,168 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.mapred;
+
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
+import org.apache.commons.httpclient.HttpClient;
+import org.apache.commons.httpclient.methods.GetMethod;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.LongWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.mapred.lib.NullOutputFormat;
+import org.junit.Assert;
+import junit.framework.TestCase;
+
+import java.io.IOException;
+import java.io.InputStream;
+import java.net.HttpURLConnection;
+import java.net.URL;
+import java.net.MalformedURLException;
+
+public class TestJobHistoryServer extends TestCase {
+  private static final Log LOG = LogFactory.getLog(TestJobHistoryServer.class);
+
+  public void testHistoryServerEmbedded() {
+
+    MiniMRCluster mrCluster = null;
+    JobConf conf = new JobConf();
+    try {
+      conf.setLong("mapred.job.tracker.retiredjobs.cache.size", 1);
+      conf.setLong("mapred.jobtracker.retirejob.interval", 0);
+      conf.setLong("mapred.jobtracker.retirejob.check", 0);
+      conf.setLong("mapred.jobtracker.completeuserjobs.maximum", 0);
+      conf.set(JobHistoryServer.MAPRED_HISTORY_SERVER_HTTP_ADDRESS,
+          "localhost:0");
+
+      mrCluster = new MiniMRCluster(1, conf.get("fs.default.name"), 1,
+          null, null, conf);
+      String historyAddress = JobHistoryServer.getHistoryUrlPrefix(mrCluster.
+          getJobTrackerRunner().getJobTracker().conf);
+      LOG.info("******** History Address: " + historyAddress);
+
+      conf = mrCluster.createJobConf();
+      createInputFile(conf, "/tmp/input");
+
+      RunningJob job = runJob(conf);
+      LOG.info("Job details: " + job);
+
+      String redirectUrl = getRedirectUrl(job.getTrackingURL());
+      Assert.assertEquals(redirectUrl.contains(historyAddress), true);
+
+    } catch (IOException e) {
+      LOG.error("Failure running test", e);
+      Assert.fail(e.getMessage());
+    } finally {
+      if (mrCluster != null) mrCluster.shutdown();
+    }
+  }
+
+  public void testHistoryServerStandalone() {
+
+    MiniMRCluster mrCluster = null;
+    JobConf conf = new JobConf();
+    JobHistoryServer server = null;
+    try {
+      conf.setLong("mapred.job.tracker.retiredjobs.cache.size", 1);
+      conf.setLong("mapred.jobtracker.retirejob.interval", 0);
+      conf.setLong("mapred.jobtracker.retirejob.check", 0);
+      conf.setLong("mapred.jobtracker.completeuserjobs.maximum", 0);
+      conf.set(JobHistoryServer.MAPRED_HISTORY_SERVER_HTTP_ADDRESS,
+          "localhost:8090");
+      conf.setBoolean(JobHistoryServer.MAPRED_HISTORY_SERVER_EMBEDDED, false);
+
+      mrCluster = new MiniMRCluster(1, conf.get("fs.default.name"), 1,
+          null, null, conf);
+      server = new JobHistoryServer(conf);
+      server.start();
+
+      String historyAddress = JobHistoryServer.getHistoryUrlPrefix(conf);
+      LOG.info("******** History Address: " + historyAddress);
+
+      conf = mrCluster.createJobConf();
+      createInputFile(conf, "/tmp/input");
+
+      RunningJob job = runJob(conf);
+      LOG.info("Job details: " + job);
+
+      String redirectUrl = getRedirectUrl(job.getTrackingURL());
+      Assert.assertEquals(redirectUrl.contains(historyAddress), true);
+
+    } catch (IOException e) {
+      LOG.error("Failure running test", e);
+      Assert.fail(e.getMessage());
+    } finally {
+      if (mrCluster != null) mrCluster.shutdown();
+      try {
+        if (server != null) server.shutdown();
+      } catch (Exception ignore) { }
+    }
+  }
+
+  private void createInputFile(Configuration conf, String path)
+      throws IOException {
+    FileSystem fs = FileSystem.get(conf);
+    FSDataOutputStream out = fs.create(new Path(path));
+    try {
+      out.write("hello world".getBytes());
+    } finally {
+      out.close();
+    }
+  }
+
+  private synchronized RunningJob runJob(JobConf conf) throws IOException {
+    conf.setJobName("History");
+
+    conf.setInputFormat(TextInputFormat.class);
+
+    conf.setMapOutputKeyClass(LongWritable.class);
+    conf.setMapOutputValueClass(Text.class);
+
+    conf.setOutputFormat(NullOutputFormat.class);
+    conf.setOutputKeyClass(LongWritable.class);
+    conf.setOutputValueClass(Text.class);
+
+    conf.setMapperClass(org.apache.hadoop.mapred.lib.IdentityMapper.class);
+    conf.setReducerClass(org.apache.hadoop.mapred.lib.IdentityReducer.class);
+
+    FileInputFormat.setInputPaths(conf, "/tmp/input");
+
+    return JobClient.runJob(conf);
+  }
+
+  private String getRedirectUrl(String jobUrl) throws IOException {
+    HttpClient client = new HttpClient();
+    GetMethod method = new GetMethod(jobUrl);
+    method.setFollowRedirects(false);
+    try {
+      int status = client.executeMethod(method);
+      Assert.assertEquals(status, HttpURLConnection.HTTP_MOVED_TEMP);
+
+      LOG.info("Location: " + method.getResponseHeader("Location"));
+      return method.getResponseHeader("Location").getValue();
+    } finally {
+      method.releaseConnection();
+    }
+  }
+
+}

Modified: hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestWebUIAuthorization.java
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestWebUIAuthorization.java?rev=1077590&r1=1077589&r2=1077590&view=diff
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestWebUIAuthorization.java (original)
+++ hadoop/common/branches/branch-0.20-security-patches/src/test/org/apache/hadoop/mapred/TestWebUIAuthorization.java Fri Mar  4 04:32:45 2011
@@ -293,9 +293,23 @@ public class TestWebUIAuthorization exte
     MyGroupsProvider.mapping.put(mrAdminUser, Arrays.asList("group8"));
   }
 
-  public void testAuthorizationForJobHistoryPages() throws Exception {    
-    setupGroupsProvider();
+  public void testAuthorizationForJobHistoryPages() throws Exception {
+    checkAuthorizationForJobHistoryPages(new Properties());
+  }
+
+  public void testAuthorizationForJobHistoryPagesStandalone() throws Exception {
     Properties props = new Properties();
+    props.setProperty(JobHistoryServer.MAPRED_HISTORY_SERVER_HTTP_ADDRESS,
+        "localhost:8090");
+    props.setProperty(JobHistoryServer.MAPRED_HISTORY_SERVER_EMBEDDED,
+        "false");
+
+    checkAuthorizationForJobHistoryPages(props);
+  }
+
+  private void checkAuthorizationForJobHistoryPages(
+      Properties props) throws Exception {
+    setupGroupsProvider();
     props.setProperty("hadoop.http.filter.initializers",
         DummyFilterInitializer.class.getName());
 
@@ -321,6 +335,14 @@ public class TestWebUIAuthorization exte
     JobConf conf = new JobConf(cluster.createJobConf());
     conf.set(JobContext.JOB_ACL_VIEW_JOB, viewColleague + " group3");
 
+    // Initialize the history server if it needs to be started in standalone mode
+    if ("false".equals(props.getProperty(
+        JobHistoryServer.MAPRED_HISTORY_SERVER_EMBEDDED, "true"))) {
+      JobHistoryServer historyServer = new JobHistoryServer(cluster.
+          getJobTrackerRunner().getJobTracker().conf);
+      historyServer.start();
+    }
+
     // Let us add group1 and group3 to modify-job-acl. So modifyColleague and
     // viewAndModifyColleague will be able to modify the job
     conf.set(JobContext.JOB_ACL_MODIFY_JOB, " group1,group3");
@@ -359,16 +381,16 @@ public class TestWebUIAuthorization exte
         JobHistory.JobInfo.getDoneJobHistoryFileName(finalJobConf, jobid));
 
     String urlEncodedHistoryFileName = URLEncoder.encode(historyFilePath.toString());
-    String jtURL = "http://localhost:" + infoPort;
+    String jobHistoryUrl = JobHistoryServer.getHistoryUrlPrefix(jobTracker.conf);
 
     // validate access of jobdetails_history.jsp
-    String jobDetailsJSP =
-        jtURL + "/jobdetailshistory.jsp?logFile=" + urlEncodedHistoryFileName;
+    String jobDetailsJSP = jobHistoryUrl +
+            "/jobdetailshistory.jsp?logFile=" + urlEncodedHistoryFileName;
     validateViewJob(jobDetailsJSP, "GET");
 
     // validate accesses of jobtaskshistory.jsp
     String jobTasksJSP =
-        jtURL + "/jobtaskshistory.jsp?logFile=" + urlEncodedHistoryFileName;
+        jobHistoryUrl + "/jobtaskshistory.jsp?logFile=" + urlEncodedHistoryFileName;
     String[] taskTypes =
         new String[] { "JOb_SETUP", "MAP", "REDUCE", "JOB_CLEANUP" };
     String[] states =
@@ -392,7 +414,7 @@ public class TestWebUIAuthorization exte
 
     for (String tip : tipsMap.keySet()) {
       // validate access of taskdetailshistory.jsp
-      validateViewJob(jtURL + "/taskdetailshistory.jsp?logFile="
+      validateViewJob(jobHistoryUrl + "/taskdetailshistory.jsp?logFile="
           + urlEncodedHistoryFileName + "&tipid=" + tip.toString(), "GET");
 
       Map<String, TaskAttempt> attemptsMap =
@@ -400,7 +422,7 @@ public class TestWebUIAuthorization exte
       for (String attempt : attemptsMap.keySet()) {
 
         // validate access to taskstatshistory.jsp
-        validateViewJob(jtURL + "/taskstatshistory.jsp?attemptid="
+        validateViewJob(jobHistoryUrl + "/taskstatshistory.jsp?attemptid="
             + attempt.toString() + "&logFile=" + urlEncodedHistoryFileName, "GET");
 
         // validate access to tasklogs
@@ -447,12 +469,12 @@ public class TestWebUIAuthorization exte
 
     // validate access to analysejobhistory.jsp
     String analyseJobHistoryJSP =
-        jtURL + "/analysejobhistory.jsp?logFile=" + urlEncodedHistoryFileName;
+        jobHistoryUrl + "/analysejobhistory.jsp?logFile=" + urlEncodedHistoryFileName;
     validateViewJob(analyseJobHistoryJSP, "GET");
 
     // validate access of jobconf_history.jsp
     String jobConfJSP =
-        jtURL + "/jobconf_history.jsp?logFile=" + urlEncodedHistoryFileName;
+        jobHistoryUrl + "/jobconf_history.jsp?logFile=" + urlEncodedHistoryFileName;
     validateViewJob(jobConfJSP, "GET");
   }
 

Added: hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/analysejobhistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/analysejobhistory.jsp?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/analysejobhistory.jsp (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/analysejobhistory.jsp Fri Mar  4 04:32:45 2011
@@ -0,0 +1,266 @@
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="javax.servlet.http.*"
+  import="java.io.*"
+  import="java.util.*"
+  import="org.apache.hadoop.http.HtmlQuoting"
+  import="org.apache.hadoop.mapred.*"
+  import="org.apache.hadoop.fs.*"
+  import="org.apache.hadoop.util.*"
+  import="java.text.SimpleDateFormat"
+  import="org.apache.hadoop.mapred.JobHistory.*"
+%>
+
+<%!	private static SimpleDateFormat dateFormat 
+                              = new SimpleDateFormat("d/MM HH:mm:ss") ; 
+%>
+<%!	private static final long serialVersionUID = 1L;
+%>
+<html><body>
+<%
+  String logFile = request.getParameter("logFile");
+  if (logFile == null) {
+    out.println("Missing job!!");
+    return;
+  }
+  String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
+  String jobid = JSPUtil.getJobID(new Path(encodedLogFileName).getName());
+  String numTasks = request.getParameter("numTasks");
+  int showTasks = 10 ; 
+  if (numTasks != null) {
+    showTasks = Integer.parseInt(numTasks);  
+  }
+  FileSystem fs = (FileSystem) application.getAttribute("fileSys");
+  JobConf jobConf = (JobConf) application.getAttribute("jobConf");
+  ACLsManager aclsManager = (ACLsManager) application.getAttribute("aclManager");
+  JobHistory.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+      response, jobConf, aclsManager, fs, new Path(logFile));
+  if (job == null) {
+    return;
+  }%>
+<h2>Hadoop Job <a href="jobdetailshistory.jsp?logFile=<%=encodedLogFileName%>"><%=jobid %> </a></h2>
+<b>User : </b> <%=HtmlQuoting.quoteHtmlChars(job.get(Keys.USER)) %><br/> 
+<b>JobName : </b> <%=HtmlQuoting.quoteHtmlChars(job.get(Keys.JOBNAME)) %><br/> 
+<b>JobConf : </b> <%=job.get(Keys.JOBCONF) %><br/> 
+<b>Submitted At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 ) %><br/> 
+<b>Launched At : </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %><br/>
+<b>Finished At : </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %><br/>
+<b>Status : </b> <%= ((job.get(Keys.JOB_STATUS) == null)?"Incomplete" :job.get(Keys.JOB_STATUS)) %><br/> 
+<hr/>
+<center>
+<%
+  if (!Values.SUCCESS.name().equals(job.get(Keys.JOB_STATUS))) {
+    out.print("<h3>No Analysis available as job did not finish</h3>");
+    return;
+  }
+  Map<String, JobHistory.Task> tasks = job.getAllTasks();
+  int finishedMaps = job.getInt(Keys.FINISHED_MAPS)  ;
+  int finishedReduces = job.getInt(Keys.FINISHED_REDUCES) ;
+  JobHistory.Task [] mapTasks = new JobHistory.Task[finishedMaps]; 
+  JobHistory.Task [] reduceTasks = new JobHistory.Task[finishedReduces]; 
+  int mapIndex = 0 , reduceIndex=0; 
+  long avgMapTime = 0;
+  long avgReduceTime = 0;
+  long avgShuffleTime = 0;
+
+  for (JobHistory.Task task : tasks.values()) {
+    Map<String, TaskAttempt> attempts = task.getTaskAttempts();
+    for (JobHistory.TaskAttempt attempt : attempts.values()) {
+      if (attempt.get(Keys.TASK_STATUS).equals(Values.SUCCESS.name())) {
+        long avgFinishTime = (attempt.getLong(Keys.FINISH_TIME) -
+                              attempt.getLong(Keys.START_TIME));
+        if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))) {
+          mapTasks[mapIndex++] = attempt ; 
+          avgMapTime += avgFinishTime;
+        } else if (Values.REDUCE.name().equals(task.get(Keys.TASK_TYPE))) { 
+          reduceTasks[reduceIndex++] = attempt;
+          avgShuffleTime += (attempt.getLong(Keys.SHUFFLE_FINISHED) - 
+                             attempt.getLong(Keys.START_TIME));
+          avgReduceTime += (attempt.getLong(Keys.FINISH_TIME) -
+                            attempt.getLong(Keys.SHUFFLE_FINISHED));
+        }
+        break;
+      }
+    }
+  }
+	 
+  if (finishedMaps > 0) {
+    avgMapTime /= finishedMaps;
+  }
+  if (finishedReduces > 0) {
+    avgReduceTime /= finishedReduces;
+    avgShuffleTime /= finishedReduces;
+  }
+  Comparator<JobHistory.Task> cMap = new Comparator<JobHistory.Task>(){
+    public int compare(JobHistory.Task t1, JobHistory.Task t2){
+      long l1 = t1.getLong(Keys.FINISH_TIME) - t1.getLong(Keys.START_TIME); 
+      long l2 = t2.getLong(Keys.FINISH_TIME) - t2.getLong(Keys.START_TIME);
+      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+    }
+  }; 
+  Comparator<JobHistory.Task> cShuffle = new Comparator<JobHistory.Task>(){
+    public int compare(JobHistory.Task t1, JobHistory.Task t2){
+      long l1 = t1.getLong(Keys.SHUFFLE_FINISHED) - 
+                t1.getLong(Keys.START_TIME); 
+      long l2 = t2.getLong(Keys.SHUFFLE_FINISHED) - 
+                t2.getLong(Keys.START_TIME); 
+      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+    }
+  };
+  Comparator<JobHistory.Task> cFinishMapRed = 
+    new Comparator<JobHistory.Task>() {
+    public int compare(JobHistory.Task t1, JobHistory.Task t2){
+      long l1 = t1.getLong(Keys.FINISH_TIME); 
+      long l2 = t2.getLong(Keys.FINISH_TIME);
+      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+    }
+  };
+
+  if (mapTasks.length > 0) {
+    Arrays.sort(mapTasks, cMap);
+    JobHistory.Task minMap = mapTasks[mapTasks.length-1] ;
+%>
+
+<h3>Time taken by best performing Map task 
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=minMap.get(Keys.TASKID)%>">
+<%=minMap.get(Keys.TASKID) %></a> : <%=StringUtils.formatTimeDiff(minMap.getLong(Keys.FINISH_TIME), minMap.getLong(Keys.START_TIME) ) %></h3>
+<h3>Average time taken by Map tasks: 
+<%=StringUtils.formatTimeDiff(avgMapTime, 0) %></h3>
+<h3>Worst performing map tasks</h3>
+<table border="2" cellpadding="5" cellspacing="2">
+<tr><td>Task Id</td><td>Time taken</td></tr>
+<%
+  for (int i=0;i<showTasks && i<mapTasks.length; i++) {
+%>
+    <tr>
+    <td><a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=mapTasks[i].get(Keys.TASKID)%>">
+        <%=mapTasks[i].get(Keys.TASKID) %></a></td>
+    <td><%=StringUtils.formatTimeDiff(mapTasks[i].getLong(Keys.FINISH_TIME), mapTasks[i].getLong(Keys.START_TIME)) %></td>
+    </tr>
+<%
+  }
+%>
+</table>
+<%  
+
+    Arrays.sort(mapTasks, cFinishMapRed);
+    JobHistory.Task lastMap = mapTasks[0] ;
+%>
+
+<h3>The last Map task 
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>
+&tipid=<%=lastMap.get(Keys.TASKID)%>"><%=lastMap.get(Keys.TASKID) %></a> 
+finished at (relative to the Job launch time): 
+<%=StringUtils.getFormattedTimeWithDiff(dateFormat, 
+                              lastMap.getLong(Keys.FINISH_TIME), 
+                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+<hr/>
+
+<%
+  }//end if(mapTasks.length > 0)
+
+  if (reduceTasks.length <= 0) return;
+  Arrays.sort(reduceTasks, cShuffle); 
+  JobHistory.Task minShuffle = reduceTasks[reduceTasks.length-1] ;
+%>
+<h3>Time taken by best performing shuffle
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>
+&tipid=<%=minShuffle.get(Keys.TASKID)%>"><%=minShuffle.get(Keys.TASKID)%></a> : 
+<%=StringUtils.formatTimeDiff(minShuffle.getLong(Keys.SHUFFLE_FINISHED), 
+                              minShuffle.getLong(Keys.START_TIME) ) %></h3>
+<h3>Average time taken by Shuffle: 
+<%=StringUtils.formatTimeDiff(avgShuffleTime, 0) %></h3>
+<h3>Worst performing shuffle(s)</h3>
+<table border="2" cellpadding="5" cellspacing="2">
+<tr><td>Task Id</td><td>Time taken</td></tr>
+<%
+  for (int i=0;i<showTasks && i<reduceTasks.length; i++) {
+%>
+    <tr>
+    <td><a href="taskdetailshistory.jsp?logFile=
+<%=encodedLogFileName%>&tipid=<%=reduceTasks[i].get(Keys.TASKID)%>">
+<%=reduceTasks[i].get(Keys.TASKID) %></a></td>
+    <td><%=
+           StringUtils.formatTimeDiff(
+                       reduceTasks[i].getLong(Keys.SHUFFLE_FINISHED),
+                       reduceTasks[i].getLong(Keys.START_TIME)) %>
+    </td>
+    </tr>
+<%
+  }
+%>
+</table>
+<%  
+  Comparator<JobHistory.Task> cFinishShuffle = 
+    new Comparator<JobHistory.Task>() {
+    public int compare(JobHistory.Task t1, JobHistory.Task t2){
+      long l1 = t1.getLong(Keys.SHUFFLE_FINISHED); 
+      long l2 = t2.getLong(Keys.SHUFFLE_FINISHED);
+      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+    }
+  };
+  Arrays.sort(reduceTasks, cFinishShuffle);
+  JobHistory.Task lastShuffle = reduceTasks[0] ;
+%>
+
+<h3>The last Shuffle  
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>
+&tipid=<%=lastShuffle.get(Keys.TASKID)%>"><%=lastShuffle.get(Keys.TASKID)%>
+</a> finished at (relative to the Job launch time): 
+<%=StringUtils.getFormattedTimeWithDiff(dateFormat,
+                              lastShuffle.getLong(Keys.SHUFFLE_FINISHED), 
+                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+
+<%
+  Comparator<JobHistory.Task> cReduce = new Comparator<JobHistory.Task>(){
+    public int compare(JobHistory.Task t1, JobHistory.Task t2){
+      long l1 = t1.getLong(Keys.FINISH_TIME) - 
+                t1.getLong(Keys.SHUFFLE_FINISHED); 
+      long l2 = t2.getLong(Keys.FINISH_TIME) - 
+                t2.getLong(Keys.SHUFFLE_FINISHED);
+      return (l2<l1 ? -1 : (l2==l1 ? 0 : 1));
+    }
+  }; 
+  Arrays.sort(reduceTasks, cReduce); 
+  JobHistory.Task minReduce = reduceTasks[reduceTasks.length-1] ;
+%>
+<hr/>
+<h3>Time taken by best performing Reduce task : 
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=minReduce.get(Keys.TASKID)%>">
+<%=minReduce.get(Keys.TASKID) %></a> : 
+<%=StringUtils.formatTimeDiff(minReduce.getLong(Keys.FINISH_TIME),
+    minReduce.getLong(Keys.SHUFFLE_FINISHED) ) %></h3>
+
+<h3>Average time taken by Reduce tasks: 
+<%=StringUtils.formatTimeDiff(avgReduceTime, 0) %></h3>
+<h3>Worst performing reduce tasks</h3>
+<table border="2" cellpadding="5" cellspacing="2">
+<tr><td>Task Id</td><td>Time taken</td></tr>
+<%
+  for (int i=0;i<showTasks && i<reduceTasks.length; i++) {
+%>
+    <tr>
+    <td><a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=reduceTasks[i].get(Keys.TASKID)%>">
+        <%=reduceTasks[i].get(Keys.TASKID) %></a></td>
+    <td><%=StringUtils.formatTimeDiff(
+             reduceTasks[i].getLong(Keys.FINISH_TIME), 
+             reduceTasks[i].getLong(Keys.SHUFFLE_FINISHED)) %></td>
+    </tr>
+<%
+  }
+%>
+</table>
+<%  
+  Arrays.sort(reduceTasks, cFinishMapRed);
+  JobHistory.Task lastReduce = reduceTasks[0] ;
+%>
+
+<h3>The last Reduce task 
+<a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>
+&tipid=<%=lastReduce.get(Keys.TASKID)%>"><%=lastReduce.get(Keys.TASKID)%>
+</a> finished at (relative to the Job launch time): 
+<%=StringUtils.getFormattedTimeWithDiff(dateFormat,
+                              lastReduce.getLong(Keys.FINISH_TIME), 
+                              job.getLong(Keys.LAUNCH_TIME) ) %></h3>
+</center>
+</body></html>

Added: hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/job_authorization_error.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/job_authorization_error.jsp?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/job_authorization_error.jsp (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/job_authorization_error.jsp Fri Mar  4 04:32:45 2011
@@ -0,0 +1,52 @@
+<%
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file 
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+%>
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="javax.servlet.*"
+  import="javax.servlet.http.*"
+  import="java.io.*"
+  import="java.net.URL"
+  import="org.apache.hadoop.util.*"
+%>
+<%!	private static final long serialVersionUID = 1L;
+%>
+
+<html>
+<head>
+<title>Error: User cannot access this Job</title>
+</head>
+<body>
+<h2>Error: User cannot do this operation on this Job</h2><br>
+
+<%
+  String errorMsg = (String) request.getAttribute("error.msg");
+%>
+
+<font size="5"> 
+<%
+  out.println(errorMsg);
+%>
+</font>
+
+<hr>
+
+<%
+out.println(ServletUtil.htmlFooter());
+%>

Added: hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobconf_history.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobconf_history.jsp?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobconf_history.jsp (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobconf_history.jsp Fri Mar  4 04:32:45 2011
@@ -0,0 +1,71 @@
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="javax.servlet.*"
+  import="javax.servlet.http.*"
+  import="java.io.*"
+  import="org.apache.hadoop.mapred.*"
+  import="org.apache.hadoop.fs.*"
+  import="org.apache.hadoop.util.*"
+  import="org.apache.hadoop.mapreduce.JobACL"
+  import="org.apache.hadoop.security.UserGroupInformation"
+  import="org.apache.hadoop.security.authorize.AccessControlList"
+  import="org.apache.hadoop.security.AccessControlException"
+%>
+
+<%!	private static final long serialVersionUID = 1L;
+%>
+
+<%
+  String logFileString = request.getParameter("logFile");
+  if (logFileString == null) {
+    out.println("<h2>Missing 'logFile' for fetching job configuration!</h2>");
+    return;
+  }
+
+  Path logFile = new Path(logFileString);
+  String jobId = JSPUtil.getJobID(logFile.getName());
+
+%>
+  
+<html>
+<head>
+<title>Job Configuration: JobId - <%= jobId %></title>
+</head>
+<body>
+<h2>Job Configuration: JobId - <%= jobId %></h2><br>
+
+<%
+  Path jobFilePath = JSPUtil.getJobConfFilePath(logFile);
+  FileSystem fs = (FileSystem) application.getAttribute("fileSys");
+  FSDataInputStream jobFile = null; 
+  try {
+    jobFile = fs.open(jobFilePath);
+    JobConf jobConf = new JobConf(jobFilePath);
+    JobConf clusterConf = (JobConf) application.getAttribute("jobConf");
+    ACLsManager aclsManager = (ACLsManager) application.getAttribute("aclManager");
+
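+    // Verify that the requesting user may view this job before rendering its
+    // configuration; checkAccessAndGetJobInfo writes the error response
+    // itself when access is denied.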
+    JobHistory.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+        response, clusterConf, aclsManager, fs, logFile);
+    if (job == null) {
+      return;
+    }
+
+    XMLUtils.transform(
+        jobConf.getConfResourceAsInputStream("webapps/static/jobconf.xsl"),
+        jobFile, out);
+  } catch (Exception e) {
+    out.println("Failed to retreive job configuration for job '" + jobId + "!");
+    out.println(e);
+  } finally {
+    if (jobFile != null) {
+      try { 
+        jobFile.close(); 
+      } catch (IOException e) {}
+    }
+  } 
+%>
+
+<br>
+<%
+out.println(ServletUtil.htmlFooter());
+%>

Added: hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobdetailshistory.jsp
URL: http://svn.apache.org/viewvc/hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobdetailshistory.jsp?rev=1077590&view=auto
==============================================================================
--- hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobdetailshistory.jsp (added)
+++ hadoop/common/branches/branch-0.20-security-patches/src/webapps/history/jobdetailshistory.jsp Fri Mar  4 04:32:45 2011
@@ -0,0 +1,344 @@
+<%@ page
+  contentType="text/html; charset=UTF-8"
+  import="javax.servlet.http.*"
+  import="java.io.*"
+  import="java.util.*"
+  import="org.apache.hadoop.fs.*"
+  import="org.apache.hadoop.http.HtmlQuoting"
+  import="org.apache.hadoop.mapred.*"
+  import="org.apache.hadoop.util.*"
+  import="java.text.*"
+  import="org.apache.hadoop.mapred.JobHistory.*"
+  import="java.security.PrivilegedExceptionAction"
+  import="org.apache.hadoop.security.AccessControlException"
+  import="org.apache.hadoop.mapreduce.JobACL"
+  import="org.apache.hadoop.security.authorize.AccessControlList"
+%>
+<%!	private static final long serialVersionUID = 1L;
+%>
+
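+<%-- Note: this shared static SimpleDateFormat is not thread-safe; concurrent
+     requests may produce garbled timestamps. --%>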
+<%! static SimpleDateFormat dateFormat = new SimpleDateFormat("d-MMM-yyyy HH:mm:ss") ; %>
+<%
+    String logFile = request.getParameter("logFile");
+    if (logFile == null) {
+      out.println("<h2>Missing 'logFile' for fetching job details!</h2>");
+      return;
+    }
+
+    String jobid = JSPUtil.getJobID(new Path(logFile).getName());
+
+    FileSystem fs = (FileSystem) application.getAttribute("fileSys");
+    JobConf jobConf = (JobConf) application.getAttribute("jobConf");
+    ACLsManager aclsManager = (ACLsManager) application.getAttribute("aclManager");
+    JobHistory.JobInfo job = JSPUtil.checkAccessAndGetJobInfo(request,
+        response, jobConf, aclsManager, fs, new Path(logFile));
+    if (job == null) {
+      return;
+    }
+
+    String encodedLogFileName = JobHistory.JobInfo.encodeJobHistoryFilePath(logFile);
+%>
+
+<html>
+<head>
+<title>Hadoop Job <%=jobid%> on History Viewer</title>
+<link rel="stylesheet" type="text/css" href="/static/hadoop.css">
+</head>
+<body>
+
+<h2>Hadoop Job <%=jobid %> on <a href="jobhistoryhome.jsp">History Viewer</a></h2>
+
+<b>User: </b> <%=HtmlQuoting.quoteHtmlChars(job.get(Keys.USER)) %><br/> 
+<b>JobName: </b> <%=HtmlQuoting.quoteHtmlChars(job.get(Keys.JOBNAME)) %><br/>  
+<b>JobConf: </b> <a href="jobconf_history.jsp?logFile=<%=encodedLogFileName%>"> 
+                 <%=job.get(Keys.JOBCONF) %></a><br/> 
+<%         
+  Map<JobACL, AccessControlList> jobAcls = job.getJobACLs();
+  JSPUtil.printJobACLs(jobConf, jobAcls, out);
+%> 
+<b>Submitted At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.SUBMIT_TIME), 0 )  %><br/> 
+<b>Launched At: </b> <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.LAUNCH_TIME), job.getLong(Keys.SUBMIT_TIME)) %><br/>
+<b>Finished At: </b>  <%=StringUtils.getFormattedTimeWithDiff(dateFormat, job.getLong(Keys.FINISH_TIME), job.getLong(Keys.LAUNCH_TIME)) %><br/>
+<b>Status: </b> <%= ("".equals(job.get(Keys.JOB_STATUS)) ? "Incomplete" : job.get(Keys.JOB_STATUS)) %><br/> 
+<%
+    Map<String, JobHistory.Task> tasks = job.getAllTasks();
+    int totalMaps = 0 ; 
+    int totalReduces = 0;
+    int totalCleanups = 0; 
+    int totalSetups = 0; 
+    int numFailedMaps = 0; 
+    int numKilledMaps = 0;
+    int numFailedReduces = 0 ; 
+    int numKilledReduces = 0;
+    int numFinishedCleanups = 0;
+    int numFailedCleanups = 0;
+    int numKilledCleanups = 0;
+    int numFinishedSetups = 0;
+    int numFailedSetups = 0;
+    int numKilledSetups = 0;
+	
+    long mapStarted = 0 ; 
+    long mapFinished = 0 ; 
+    long reduceStarted = 0 ; 
+    long reduceFinished = 0;
+    long cleanupStarted = 0;
+    long cleanupFinished = 0; 
+    long setupStarted = 0;
+    long setupFinished = 0; 
+        
+    Map <String,String> allHosts = new TreeMap<String,String>();
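+    // Walk every attempt of every task, tallying per-type totals and
+    // failed/killed counts, and tracking the earliest start and latest
+    // finish time for each task type.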
+    for (JobHistory.Task task : tasks.values()) {
+      Map<String, TaskAttempt> attempts = task.getTaskAttempts();
+      allHosts.put(task.get(Keys.HOSTNAME), "");
+      for (TaskAttempt attempt : attempts.values()) {
+        long startTime = attempt.getLong(Keys.START_TIME) ; 
+        long finishTime = attempt.getLong(Keys.FINISH_TIME) ; 
+        if (Values.MAP.name().equals(task.get(Keys.TASK_TYPE))){
+          if (mapStarted==0 || mapStarted > startTime ) {
+            mapStarted = startTime; 
+          }
+          if (mapFinished < finishTime ) {
+            mapFinished = finishTime ; 
+          }
+          totalMaps++; 
+          if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFailedMaps++; 
+          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numKilledMaps++;
+          }
+        } else if (Values.REDUCE.name().equals(task.get(Keys.TASK_TYPE))) {
+          if (reduceStarted==0||reduceStarted > startTime) {
+            reduceStarted = startTime ; 
+          }
+          if (reduceFinished < finishTime) {
+            reduceFinished = finishTime; 
+          }
+          totalReduces++; 
+          if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFailedReduces++;
+          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numKilledReduces++;
+          }
+        } else if (Values.CLEANUP.name().equals(task.get(Keys.TASK_TYPE))) {
+          if (cleanupStarted==0||cleanupStarted > startTime) {
+            cleanupStarted = startTime ; 
+          }
+          if (cleanupFinished < finishTime) {
+            cleanupFinished = finishTime; 
+          }
+          totalCleanups++; 
+          if (Values.SUCCESS.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFinishedCleanups++;
+          } else if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFailedCleanups++;
+          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numKilledCleanups++;
+          } 
+        } else if (Values.SETUP.name().equals(task.get(Keys.TASK_TYPE))) {
+          if (setupStarted==0||setupStarted > startTime) {
+            setupStarted = startTime ; 
+          }
+          if (setupFinished < finishTime) {
+            setupFinished = finishTime; 
+          }
+          totalSetups++; 
+          if (Values.SUCCESS.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFinishedSetups++;
+          } else if (Values.FAILED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numFailedSetups++;
+          } else if (Values.KILLED.name().equals(attempt.get(Keys.TASK_STATUS))) {
+            numKilledSetups++;
+          }
+        }
+      }
+    }
+%>
+<b><a href="analysejobhistory.jsp?logFile=<%=encodedLogFileName%>">Analyse This Job</a></b> 
+<hr/>
+<center>
+<table border="2" cellpadding="5" cellspacing="2">
+<tr>
+<td>Kind</td><td>Total Tasks (successful+failed+killed)</td><td>Successful tasks</td><td>Failed tasks</td><td>Killed tasks</td><td>Start Time</td><td>Finish Time</td>
+</tr>
+<tr>
+<td>Setup</td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=all">
+        <%=totalSetups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.SUCCESS %>">
+        <%=numFinishedSetups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.FAILED %>">
+        <%=numFailedSetups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.SETUP.name() %>&status=<%=Values.KILLED %>">
+        <%=numKilledSetups%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, setupStarted, 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, setupFinished, setupStarted) %></td>
+</tr>
+<tr>
+<td>Map</td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=all">
+        <%=totalMaps %></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.SUCCESS %>">
+        <%=job.getInt(Keys.FINISHED_MAPS) %></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.FAILED %>">
+        <%=numFailedMaps %></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.MAP.name() %>&status=<%=Values.KILLED %>">
+        <%=numKilledMaps %></a></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, mapStarted, 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, mapFinished, mapStarted) %></td>
+</tr>
+<tr>
+<td>Reduce</td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=all">
+        <%=totalReduces%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.SUCCESS %>">
+        <%=job.getInt(Keys.FINISHED_REDUCES)%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.FAILED %>">
+        <%=numFailedReduces%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.REDUCE.name() %>&status=<%=Values.KILLED %>">
+        <%=numKilledReduces%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, reduceStarted, 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, reduceFinished, reduceStarted) %></td>
+</tr>
+<tr>
+<td>Cleanup</td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=all">
+        <%=totalCleanups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.SUCCESS %>">
+        <%=numFinishedCleanups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.FAILED %>">
+        <%=numFailedCleanups%></a></td>
+    <td><a href="jobtaskshistory.jsp?logFile=<%=encodedLogFileName%>&taskType=<%=Values.CLEANUP.name() %>&status=<%=Values.KILLED %>">
+        <%=numKilledCleanups%></a></td>  
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, cleanupStarted, 0) %></td>
+    <td><%=StringUtils.getFormattedTimeWithDiff(dateFormat, cleanupFinished, cleanupStarted) %></td>
+</tr>
+</table>
+
+<br>
+<br>
+
+<table border=2 cellpadding="5" cellspacing="2">
+  <tr>
+  <th><br/></th>
+  <th>Counter</th>
+  <th>Map</th>
+  <th>Reduce</th>
+  <th>Total</th>
+</tr>
+
+<%  
+
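+ // The job's aggregate, map-side and reduce-side counters are stored in the
+ // history log as escaped compact strings; parse them back into Counters
+ // objects for display.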
+ Counters totalCounters = 
+   Counters.fromEscapedCompactString(job.get(Keys.COUNTERS));
+ Counters mapCounters = 
+   Counters.fromEscapedCompactString(job.get(Keys.MAP_COUNTERS));
+ Counters reduceCounters = 
+   Counters.fromEscapedCompactString(job.get(Keys.REDUCE_COUNTERS));
+
+ if (totalCounters != null) {
+   for (String groupName : totalCounters.getGroupNames()) {
+     Counters.Group totalGroup = totalCounters.getGroup(groupName);
+     Counters.Group mapGroup = mapCounters.getGroup(groupName);
+     Counters.Group reduceGroup = reduceCounters.getGroup(groupName);
+  
+     Format decimal = new DecimalFormat();
+  
+     boolean isFirst = true;
+     Iterator<Counters.Counter> ctrItr = totalGroup.iterator();
+     while(ctrItr.hasNext()) {
+       Counters.Counter counter = ctrItr.next();
+       String name = counter.getDisplayName();
+       String mapValue = 
+         decimal.format(mapGroup.getCounter(name));
+       String reduceValue = 
+         decimal.format(reduceGroup.getCounter(name));
+       String totalValue = decimal.format(counter.getCounter());
+%>
+       <tr>
+<%
+       if (isFirst) {
+         isFirst = false;
+%>
+         <td rowspan="<%=totalGroup.size()%>">
+         <%=HtmlQuoting.quoteHtmlChars(totalGroup.getDisplayName())%></td>
+<%
+       }
+%>
+       <td><%=HtmlQuoting.quoteHtmlChars(counter.getDisplayName())%></td>
+       <td align="right"><%=mapValue%></td>
+       <td align="right"><%=reduceValue%></td>
+       <td align="right"><%=totalValue%></td>
+     </tr>
+<%
+      }
+    }
+  }
+%>
+</table>
+<br>
+
+<br/>
+ <%
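+    // Re-parse the history log with a filter that collects the task attempts
+    // that failed, grouped by the host they ran on.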
+    DefaultJobHistoryParser.FailedOnNodesFilter filter = 
+                 new DefaultJobHistoryParser.FailedOnNodesFilter();
+    JobHistory.parseHistoryFromFS(logFile, filter, fs); 
+    Map<String, Set<String>> badNodes = filter.getValues(); 
+    if (badNodes.size() > 0) {
+ %>
+<h3>Failed task attempts by node</h3>
+<table border="1">
+<tr><td>Hostname</td><td>Failed Tasks</td></tr>
+ <%	  
+      for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
+        String node = entry.getKey();
+        Set<String> failedTasks = entry.getValue();
+%>
+        <tr>
+        <td><%=node %></td>
+        <td>
+<%
+        for (String t : failedTasks) {
+%>
+          <a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=t %>"><%=t %></a>,&nbsp;
+<%		  
+        }
+%>	
+        </td>
+        </tr>
+<%
+      }
+%>
+</table>
+<%
+    }
+%>
+<br/>
+
+ <%
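+    // Do the same for killed task attempts, grouped by host.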
+    DefaultJobHistoryParser.KilledOnNodesFilter killedFilter =
+                 new DefaultJobHistoryParser.KilledOnNodesFilter();
+    JobHistory.parseHistoryFromFS(logFile, killedFilter, fs);
+    badNodes = killedFilter.getValues(); 
+    if (badNodes.size() > 0) {
+ %>
+<h3>Killed task attempts by node</h3>
+<table border="1">
+<tr><td>Hostname</td><td>Killed Tasks</td></tr>
+ <%	  
+      for (Map.Entry<String, Set<String>> entry : badNodes.entrySet()) {
+        String node = entry.getKey();
+        Set<String> killedTasks = entry.getValue();
+%>
+        <tr>
+        <td><%=node %></td>
+        <td>
+<%
+        for (String t : killedTasks) {
+%>
+          <a href="taskdetailshistory.jsp?logFile=<%=encodedLogFileName%>&tipid=<%=t %>"><%=t %></a>,&nbsp;
+<%		  
+        }
+%>	
+        </td>
+        </tr>
+<%
+      }
+%>
+</table>
+<%
+    }
+%>
+</center>
+</body></html>