Posted to commits@hive.apache.org by pv...@apache.org on 2018/10/04 16:45:40 UTC

[2/2] hive git commit: HIVE-17300: WebUI query plan graphs (Karen Coppage, reviewed by Szehon Ho and Peter Vary)

HIVE-17300: WebUI query plan graphs (Karen Coppage, reviewed by Szehon Ho and Peter Vary)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/1cfe4f91
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/1cfe4f91
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/1cfe4f91

Branch: refs/heads/master
Commit: 1cfe4f913b7806707259c0894ddb991d2e3ddae4
Parents: 6c34a37
Author: Karen Coppage <ka...@cloudera.com>
Authored: Thu Oct 4 18:44:24 2018 +0200
Committer: Peter Vary <pv...@cloudera.com>
Committed: Thu Oct 4 18:44:24 2018 +0200

----------------------------------------------------------------------
 .../org/apache/hadoop/hive/common/LogUtils.java |  25 +
 .../org/apache/hadoop/hive/conf/HiveConf.java   |  11 +
 .../service/cli/session/TestQueryDisplay.java   |  83 +++
 .../java/org/apache/hadoop/hive/ql/Driver.java  |  20 +-
 .../org/apache/hadoop/hive/ql/MapRedStats.java  |  12 +-
 .../org/apache/hadoop/hive/ql/QueryDisplay.java |  82 +++
 .../org/apache/hadoop/hive/ql/QueryInfo.java    |  10 +
 .../apache/hadoop/hive/ql/exec/ExplainTask.java |   5 +-
 .../hive/ql/exec/mr/HadoopJobExecHelper.java    |  34 +-
 .../hadoop/hive/ql/exec/mr/MapRedTask.java      |  16 +
 .../org/apache/hive/tmpl/QueryProfileTmpl.jamon |  91 +++-
 .../service/cli/operation/SQLOperation.java     |   3 +
 .../static/css/query-plan-graph.css             |  22 +
 .../hive-webapps/static/js/query-plan-graph.js  | 533 +++++++++++++++++++
 .../resources/hive-webapps/static/js/vis.min.js |  63 +++
 15 files changed, 975 insertions(+), 35 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
index 5068eb5..874a3e1 100644
--- a/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
+++ b/common/src/java/org/apache/hadoop/hive/common/LogUtils.java
@@ -29,7 +29,10 @@ import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.logging.log4j.LogManager;
 import org.apache.logging.log4j.core.Appender;
+import org.apache.logging.log4j.core.appender.FileAppender;
+import org.apache.logging.log4j.core.appender.RollingFileAppender;
 import org.apache.logging.log4j.core.LoggerContext;
+import org.apache.logging.log4j.core.appender.RollingRandomAccessFileAppender;
 import org.apache.logging.log4j.core.appender.routing.RoutingAppender;
 import org.apache.logging.log4j.core.config.Configurator;
 import org.apache.logging.log4j.core.config.LoggerConfig;
@@ -231,6 +234,28 @@ public class LogUtils {
   }
 
   /**
+   * Get the path of the log file for the user to see on the WebUI.
+   */
+  public static String getLogFilePath() {
+    String logFilePath = null;
+    org.apache.logging.log4j.Logger rootLogger = LogManager.getRootLogger();
+    if (rootLogger instanceof org.apache.logging.log4j.core.Logger) {
+      org.apache.logging.log4j.core.Logger coreLogger =
+          (org.apache.logging.log4j.core.Logger)rootLogger;
+      for (Appender appender : coreLogger.getAppenders().values()) {
+        if (appender instanceof FileAppender) {
+          logFilePath = ((FileAppender) appender).getFileName();
+        } else if (appender instanceof RollingFileAppender) {
+          logFilePath = ((RollingFileAppender) appender).getFileName();
+        } else if (appender instanceof RollingRandomAccessFileAppender) {
+          logFilePath = ((RollingRandomAccessFileAppender) appender).getFileName();
+        }
+      }
+    }
+    return logFilePath;
+  }
+
+  /**
    * Stop the subordinate appender for the operation log so it will not leak a file descriptor.
    * @param routingAppenderName the name of the RoutingAppender
    * @param queryId the id of the query that is closing
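
The new helper is consumed by QueryDisplay#getFullLogLocation() and HadoopJobExecHelper in this patch. A minimal sketch of calling it directly (the wrapper class below is hypothetical and only illustrative):

    import org.apache.hadoop.hive.common.LogUtils;

    public class LogFilePathExample {
      public static void main(String[] args) {
        // Returns the file backing the root logger's File / RollingFile /
        // RollingRandomAccessFile appender, or null if none is configured.
        String logFilePath = LogUtils.getLogFilePath();
        System.out.println(logFilePath != null ? logFilePath : "no file appender configured");
      }
    }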

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
----------------------------------------------------------------------
diff --git a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
index 799fc05..58951ef 100644
--- a/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
+++ b/common/src/java/org/apache/hadoop/hive/conf/HiveConf.java
@@ -3140,6 +3140,17 @@ public class HiveConf extends Configuration {
     HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT("hive.server2.webui.explain.output", false,
         "When set to true, the EXPLAIN output for every query is displayed"
             + " in the HS2 WebUI / Drilldown / Query Plan tab.\n"),
+    HIVE_SERVER2_WEBUI_SHOW_GRAPH("hive.server2.webui.show.graph", false,
+        "Set this to true to to display query plan as a graph instead of text in the WebUI. " +
+        "Only works with hive.server2.webui.explain.output set to true."),
+    HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE("hive.server2.webui.max.graph.size", 25,
+        "Max number of stages graph can display. If number of stages exceeds this, no query" +
+        "plan will be shown. Only works when hive.server2.webui.show.graph and " +
+        "hive.server2.webui.explain.output set to true."),
+    HIVE_SERVER2_WEBUI_SHOW_STATS("hive.server2.webui.show.stats", false,
+        "Set this to true to to display statistics for MapReduce tasks in the WebUI. " +
+        "Only works when hive.server2.webui.show.graph and hive.server2.webui.explain.output " +
+        "set to true."),
     HIVE_SERVER2_WEBUI_ENABLE_CORS("hive.server2.webui.enable.cors", false,
       "Whether to enable cross origin requests (CORS)\n"),
     HIVE_SERVER2_WEBUI_CORS_ALLOWED_ORIGINS("hive.server2.webui.cors.allowed.origins", "*",
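
A minimal sketch of enabling the new WebUI graph options programmatically, assuming the HiveConf is built before HiveServer2 and its WebUI start; the ConfVars names come from this hunk, the values mirror TestQueryDisplay below, and the wrapper class is hypothetical:

    import org.apache.hadoop.hive.conf.HiveConf;

    public class WebUiGraphConfExample {
      public static HiveConf configure() {
        HiveConf conf = new HiveConf();
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT, true); // prerequisite
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH, true);     // plan as a graph
        conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE, 25);    // stage limit (default)
        conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_STATS, true);     // MapReduce statistics
        return conf;
      }
    }

The same four properties can also be set in hive-site.xml instead.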

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
----------------------------------------------------------------------
diff --git a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
index 95b46a8..a65388a 100644
--- a/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
+++ b/itests/hive-unit/src/test/java/org/apache/hive/service/cli/session/TestQueryDisplay.java
@@ -151,6 +151,89 @@ public class TestQueryDisplay {
     }
   }
 
+  /**
+   * Test for the HiveConf options HIVE_SERVER2_WEBUI_SHOW_GRAPH,
+   * HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE.
+   */
+  @Test
+  public void checkWebuiShowGraph() throws Exception {
+    // WebUI-related boolean confs must be set before build, since the implementation of
+    // QueryProfileTmpl.jamon depends on them.
+    // They depend on HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT being set to true.
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT, true);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH, true);
+
+    HiveSession session = sessionManager
+        .createSession(new SessionHandle(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8),
+            TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8, "testuser", "", "",
+            new HashMap<String, String>(), false, "");
+    SessionState.start(conf);
+
+    session.getSessionConf()
+        .setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE, 0);
+    testGraphDDL(session, true);
+    session.getSessionConf()
+        .setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE, 40);
+    testGraphDDL(session, false);
+
+    session.close();
+    resetConfToDefaults();
+  }
+
+  private void testGraphDDL(HiveSession session, boolean exceedMaxGraphSize) throws Exception {
+    OperationHandle opHandleGraph = session.executeStatement("show tables", null);
+    session.closeOperation(opHandleGraph);
+
+    // Check for a query plan. If the graph size exceeds the max allowed, none should appear.
+    verifyDDLHtml("Query information not available.",
+        opHandleGraph.getHandleIdentifier().toString(), exceedMaxGraphSize);
+    verifyDDLHtml("STAGE DEPENDENCIES",
+        opHandleGraph.getHandleIdentifier().toString(), !exceedMaxGraphSize);
+    // Check that if the plan JSON is present, it is not empty
+    verifyDDLHtml("jsonPlan = {}", opHandleGraph.getHandleIdentifier().toString(), false);
+  }
+
+  /**
+   * Test for the HiveConf option HIVE_SERVER2_WEBUI_SHOW_STATS, which is available for MapReduce
+   * jobs only.
+   */
+  @Test
+  public void checkWebUIShowStats() throws Exception {
+    // WebUI-related boolean confs must be set before build. HIVE_SERVER2_WEBUI_SHOW_STATS depends
+    // on HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT and HIVE_SERVER2_WEBUI_SHOW_GRAPH being set to true.
+    conf.setVar(HiveConf.ConfVars.HIVE_EXECUTION_ENGINE, "mr");
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT, true);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH, true);
+    conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE, 40);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_STATS, true);
+
+    HiveSession session = sessionManager
+        .createSession(new SessionHandle(TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8),
+            TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V8, "testuser", "", "",
+            new HashMap<String, String>(), false, "");
+    SessionState.start(conf);
+
+    OperationHandle opHandleSetup =
+        session.executeStatement("CREATE TABLE statsTable (i int)", null);
+    session.closeOperation(opHandleSetup);
+    OperationHandle opHandleMrQuery =
+        session.executeStatement("INSERT INTO statsTable VALUES (0)", null);
+    session.closeOperation(opHandleMrQuery);
+
+    // INSERT queries include a MapReduce task.
+    verifyDDLHtml("Counters", opHandleMrQuery.getHandleIdentifier().toString(), true);
+
+    session.close();
+    resetConfToDefaults();
+  }
+
+  private void resetConfToDefaults() {
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT, false);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH, false);
+    conf.setBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_STATS, false);
+    conf.setIntVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE, 25);
+  }
+
   private void verifyDDL(QueryInfo queryInfo, String stmt, String handle, boolean finished) {
 
     Assert.assertEquals(queryInfo.getUserName(), "testuser");

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
index 95619a4..6441e67 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/Driver.java
@@ -149,6 +149,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hive.common.util.ShutdownHookManager;
 import org.apache.hive.common.util.TxnIdUtils;
 import org.apache.thrift.TException;
+import org.json.JSONObject;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -1035,9 +1036,22 @@ public class Driver implements IDriver {
     PrintStream ps = new PrintStream(baos);
     try {
       List<Task<?>> rootTasks = sem.getAllRootTasks();
-      task.getJSONPlan(ps, rootTasks, sem.getFetchTask(), false, true, true, sem.getCboInfo(),
-          plan.getOptimizedQueryString());
-      ret = baos.toString();
+      if (conf.getBoolVar(ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH)) {
+        JSONObject jsonPlan = task.getJSONPlan(
+            null, rootTasks, sem.getFetchTask(), true, true, true, sem.getCboInfo(),
+            plan.getOptimizedQueryString());
+        if (jsonPlan.getJSONObject(ExplainTask.STAGE_DEPENDENCIES) != null &&
+            jsonPlan.getJSONObject(ExplainTask.STAGE_DEPENDENCIES).length() <=
+                conf.getIntVar(ConfVars.HIVE_SERVER2_WEBUI_MAX_GRAPH_SIZE)) {
+          ret = jsonPlan.toString();
+        } else {
+          ret = null;
+        }
+      } else {
+        task.getJSONPlan(ps, rootTasks, sem.getFetchTask(), false, true, true, sem.getCboInfo(),
+            plan.getOptimizedQueryString());
+        ret = baos.toString();
+      }
     } catch (Exception e) {
       LOG.warn("Exception generating explain output: " + e, e);
     }
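
For orientation, a hedged sketch of the plan JSON shape that this branch stores and the WebUI JavaScript (query-plan-graph.js below) consumes. The top-level keys "STAGE DEPENDENCIES" and "STAGE PLANS" and the per-stage keys come from ExplainTask and the graph script; the concrete stage names and task types here are only illustrative:

    import org.json.JSONObject;

    public class PlanJsonShapeExample {
      // Builds a tiny plan of the same general shape Driver keeps when the graph is enabled.
      public static JSONObject samplePlan() throws Exception {
        JSONObject mapRedStage = new JSONObject();
        mapRedStage.put("ROOT STAGE", "TRUE");
        mapRedStage.put("TASK TYPE", "MAPRED");          // illustrative task type
        JSONObject fetchStage = new JSONObject();
        fetchStage.put("TASK TYPE", "FETCH");            // illustrative task type
        fetchStage.put("DEPENDENT STAGES", "Stage-1");   // drawn as edge Stage-1 -> Stage-0
        JSONObject deps = new JSONObject();
        deps.put("Stage-1", mapRedStage);
        deps.put("Stage-0", fetchStage);
        JSONObject plan = new JSONObject();
        plan.put("STAGE DEPENDENCIES", deps);            // ExplainTask.STAGE_DEPENDENCIES
        plan.put("STAGE PLANS", new JSONObject());       // per-stage details shown on click
        return plan;
      }
    }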

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java b/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
index ac45ec4..f13b496 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/MapRedStats.java
@@ -40,13 +40,13 @@ public class MapRedStats {
   private static final String CLASS_NAME = MapRedStats.class.getName();
   private static final Logger LOG = LoggerFactory.getLogger(CLASS_NAME);
   private JobConf jobConf;
-  int numMap;
-  int numReduce;
-  long cpuMSec;
-  Counters counters = null;
-  boolean success;
+  private int numMap;
+  private int numReduce;
+  private long cpuMSec;
+  private Counters counters = null;
+  private boolean success;
 
-  String jobId;
+  private String jobId;
 
   private long numModifiedRows;
 

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
index 9a77c29..79cfd84 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryDisplay.java
@@ -18,16 +18,22 @@
 package org.apache.hadoop.hive.ql;
 
 import com.google.common.collect.ImmutableMap;
+import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.ql.exec.Task;
 import org.apache.hadoop.hive.ql.exec.TaskResult;
 import org.apache.hadoop.hive.ql.plan.api.StageType;
 
+import java.io.IOException;
 import java.io.Serializable;
 import java.util.*;
 
+import org.apache.hadoop.mapred.Counters;
+import org.apache.hadoop.mapred.RunningJob;
 import org.codehaus.jackson.annotate.JsonIgnoreProperties;
 import org.codehaus.jackson.annotate.JsonWriteNullProperties;
 import org.codehaus.jackson.annotate.JsonIgnore;
+import org.json.JSONException;
+import org.json.JSONObject;
 
 /**
  * Some limited query information to save for WebUI.
@@ -56,16 +62,40 @@ public class QueryDisplay {
     tasks.get(tTask.getId()).updateStatus(tTask);
   }
 
+  public synchronized <T extends Serializable> void updateTaskStatistics(MapRedStats mapRedStats,
+      RunningJob rj, String taskId) throws IOException, JSONException {
+    if (tasks.containsKey(taskId)) {
+      tasks.get(taskId).updateMapRedStatsJson(mapRedStats, rj);
+    }
+  }
+
   //Inner classes
   public enum Phase {
     COMPILATION,
     EXECUTION,
   }
 
+  public String getFullLogLocation() {
+    return LogUtils.getLogFilePath();
+  }
+
   @JsonWriteNullProperties(false)
   @JsonIgnoreProperties(ignoreUnknown = true)
   public static class TaskDisplay {
 
+    public static final String NUMBER_OF_MAPPERS = "Number of Mappers";
+    public static final String NUMBER_OF_REDUCERS = "Number of Reducers";
+    public static final String COUNTERS = "Counters";
+    public static final String JOB_ID = "Job Id";
+    public static final String JOB_FILE = "Job File";
+    public static final String TRACKING_URL = "Tracking URL";
+    public static final String MAP_PROGRESS = "Map Progress (%)";
+    public static final String REDUCE_PROGRESS = "Reduce Progress (%)";
+    public static final String CLEANUP_PROGRESS = "Cleanup Progress (%)";
+    public static final String SETUP_PROGRESS = "Setup Progress (%)";
+    public static final String COMPLETE = "Complete";
+    public static final String SUCCESSFUL = "Successful";
+
     private Integer returnValue;  //if set, determines that task is complete.
     private String errorMsg;
 
@@ -80,6 +110,7 @@ public class QueryDisplay {
     private String name;
     private boolean requireLock;
     private String statusMessage;
+    private JSONObject statsJSON;
 
     // required for jackson
     public TaskDisplay() {
@@ -103,6 +134,57 @@ public class QueryDisplay {
       }
     }
 
+    private void updateMapRedStatsJson(MapRedStats stats, RunningJob rj) throws IOException, JSONException {
+      if (statsJSON == null) {
+        statsJSON = new JSONObject();
+      }
+      if (stats != null) {
+        if (stats.getNumMap() >= 0) {
+          statsJSON.put(NUMBER_OF_MAPPERS, stats.getNumMap());
+        }
+        if (stats.getNumReduce() >= 0) {
+          statsJSON.put(NUMBER_OF_REDUCERS, stats.getNumReduce());
+        }
+        if (stats.getCounters() != null) {
+          statsJSON.put(COUNTERS, getCountersJson(stats.getCounters()));
+        }
+      }
+      if (rj != null) {
+        statsJSON.put(JOB_ID, rj.getID().toString());
+        statsJSON.put(JOB_FILE, rj.getJobFile());
+        statsJSON.put(TRACKING_URL, rj.getTrackingURL());
+        statsJSON.put(MAP_PROGRESS, Math.round(rj.mapProgress() * 100));
+        statsJSON.put(REDUCE_PROGRESS, Math.round(rj.reduceProgress() * 100));
+        statsJSON.put(CLEANUP_PROGRESS, Math.round(rj.cleanupProgress() * 100));
+        statsJSON.put(SETUP_PROGRESS, Math.round(rj.setupProgress() * 100));
+        statsJSON.put(COMPLETE, rj.isComplete());
+        statsJSON.put(SUCCESSFUL, rj.isSuccessful());
+      }
+    }
+
+    public synchronized String getStatsJsonString() {
+      if (statsJSON != null) {
+        return statsJSON.toString();
+      }
+      return null;
+    }
+
+    private JSONObject getCountersJson(Counters ctrs) throws JSONException {
+      JSONObject countersJson = new JSONObject();
+      Iterator<Counters.Group> iterator = ctrs.iterator();
+      while(iterator.hasNext()) {
+        Counters.Group group = iterator.next();
+        Iterator<Counters.Counter> groupIterator = group.iterator();
+        JSONObject groupJson = new JSONObject();
+        while(groupIterator.hasNext()) {
+          Counters.Counter counter = groupIterator.next();
+          groupJson.put(counter.getDisplayName(), counter.getCounter());
+        }
+        countersJson.put(group.getDisplayName(), groupJson);
+      }
+      return countersJson;
+    }
+
     public synchronized Long getElapsedTime() {
       if (endTime == null) {
         if (beginTime == null) {
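
A small, hypothetical sketch of reading the per-task statistics JSON built above from Java; the WebUI itself inlines getStatsJsonString() into the page via QueryProfileTmpl.jamon, so this reader class is only illustrative:

    import org.apache.hadoop.hive.ql.QueryDisplay;
    import org.json.JSONObject;

    public class TaskStatsReaderExample {
      // taskDisplay is assumed to come from QueryDisplay#getTaskDisplays().
      public static void printProgress(QueryDisplay.TaskDisplay taskDisplay) throws Exception {
        String json = taskDisplay.getStatsJsonString();
        if (json == null) {
          return; // stats are only populated for MapReduce tasks when the WebUI stats confs are on
        }
        JSONObject stats = new JSONObject(json);
        // Keys match the constants defined on TaskDisplay above.
        System.out.println(QueryDisplay.TaskDisplay.MAP_PROGRESS + ": "
            + stats.optInt(QueryDisplay.TaskDisplay.MAP_PROGRESS, 0));
        System.out.println(QueryDisplay.TaskDisplay.REDUCE_PROGRESS + ": "
            + stats.optInt(QueryDisplay.TaskDisplay.REDUCE_PROGRESS, 0));
      }
    }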

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java b/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
index 0f127ec..376037a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/QueryInfo.java
@@ -32,6 +32,8 @@ public class QueryInfo {
   private String state;
   private QueryDisplay queryDisplay;
 
+  private String operationLogLocation;
+
   public QueryInfo(String state, String userName, String executionEngine, String operationId) {
     this.state = state;
     this.userName = userName;
@@ -99,4 +101,12 @@ public class QueryInfo {
   public synchronized Long getRuntime() {
     return runtime;
   }
+
+  public String getOperationLogLocation() {
+    return operationLogLocation;
+  }
+
+  public void setOperationLogLocation(String operationLogLocation) {
+    this.operationLogLocation = operationLogLocation;
+  }
 }

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
index 2f20dd4..46bf088 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/ExplainTask.java
@@ -82,6 +82,7 @@ import com.google.common.annotations.VisibleForTesting;
  *
  **/
 public class ExplainTask extends Task<ExplainWork> implements Serializable {
+  public static final String STAGE_DEPENDENCIES = "STAGE DEPENDENCIES";
   private static final long serialVersionUID = 1L;
   public static final String EXPL_COLUMN_NAME = "Explain";
   private final Set<Operator<?>> visitedOps = new HashSet<Operator<?>>();
@@ -283,7 +284,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
         if (cboInfo != null) {
           outJSONObject.put("cboInfo", cboInfo);
         }
-        outJSONObject.put("STAGE DEPENDENCIES", jsonDependencies);
+        outJSONObject.put(STAGE_DEPENDENCIES, jsonDependencies);
       }
 
       // Go over all the tasks and dump out the plans
@@ -1216,7 +1217,7 @@ public class ExplainTask extends Task<ExplainWork> implements Serializable {
       throws Exception {
 
     if (out != null) {
-      out.println("STAGE DEPENDENCIES:");
+      out.println(STAGE_DEPENDENCIES + ":");
     }
 
     JSONObject json = jsonOutput ? new JSONObject(new LinkedHashMap<>()) : null;

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
index eb6cbf7..cb8d81a 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/HadoopJobExecHelper.java
@@ -30,6 +30,7 @@ import java.util.List;
 import java.util.Map;
 import java.util.concurrent.TimeUnit;
 
+import org.apache.hadoop.hive.common.LogUtils;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.conf.HiveConf.ConfVars;
 import org.apache.hadoop.hive.ql.Context;
@@ -56,11 +57,6 @@ import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.TaskCompletionEvent;
 import org.apache.hadoop.mapred.TaskReport;
 import org.apache.hive.common.util.ShutdownHookManager;
-import org.apache.logging.log4j.LogManager;
-import org.apache.logging.log4j.core.Appender;
-import org.apache.logging.log4j.core.Logger;
-import org.apache.logging.log4j.core.appender.FileAppender;
-import org.apache.logging.log4j.core.appender.RollingFileAppender;
 import org.slf4j.LoggerFactory;
 
 public class HadoopJobExecHelper {
@@ -239,6 +235,10 @@ public class HadoopJobExecHelper {
     List<ClientStatsPublisher> clientStatPublishers = getClientStatPublishers();
     final boolean localMode = ShimLoader.getHadoopShims().isLocalMode(job);
 
+    MapRedStats mapRedStats = new MapRedStats(
+            job, numMap, numReduce, cpuMsec, false, rj.getID().toString());
+    updateMapRedTaskWebUIStatistics(mapRedStats, rj);
+
     while (!rj.isComplete()) {
       if (th.getContext() != null) {
         th.getContext().checkHeartbeaterLockException();
@@ -314,6 +314,11 @@ public class HadoopJobExecHelper {
 
       Counters ctrs = th.getCounters();
 
+      mapRedStats.setCounters(ctrs);
+      mapRedStats.setNumMap(numMap);
+      mapRedStats.setNumReduce(numReduce);
+      updateMapRedTaskWebUIStatistics(mapRedStats, rj);
+
       if (fatal = checkFatalErrors(ctrs, errMsg)) {
         console.printError("[Fatal Error] " + errMsg.toString() + ". Killing the job.");
         rj.killJob();
@@ -419,8 +424,10 @@ public class HadoopJobExecHelper {
       }
     }
 
-    MapRedStats mapRedStats = new MapRedStats(job, numMap, numReduce, cpuMsec, success, rj.getID().toString());
+    mapRedStats.setSuccess(success);
     mapRedStats.setCounters(ctrs);
+    mapRedStats.setCpuMSec(cpuMsec);
+    updateMapRedTaskWebUIStatistics(mapRedStats, rj);
 
     // update based on the final value of the counters
     updateCounters(ctrs, rj);
@@ -443,6 +450,12 @@ public class HadoopJobExecHelper {
     return mapRedStats;
   }
 
+  private void updateMapRedTaskWebUIStatistics(MapRedStats mapRedStats, RunningJob rj) {
+    if (task instanceof MapRedTask) {
+      ((MapRedTask) task).updateWebUiStats(mapRedStats, rj);
+    }
+  }
+
 
   private String getId() {
     return this.task.getId();
@@ -511,14 +524,7 @@ public class HadoopJobExecHelper {
     sb.append("Task ID:\n  " + taskId + "\n\n");
     sb.append("Logs:\n");
     console.printError(sb.toString());
-
-    for (Appender appender : ((Logger) LogManager.getRootLogger()).getAppenders().values()) {
-      if (appender instanceof FileAppender) {
-        console.printError(((FileAppender) appender).getFileName());
-      } else if (appender instanceof RollingFileAppender) {
-        console.printError(((RollingFileAppender) appender).getFileName());
-      }
-    }
+    console.printError(LogUtils.getLogFilePath());
   }
 
   public int progressLocal(Process runningJob, String taskId) {

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
index a71faf8..8266906 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/mr/MapRedTask.java
@@ -38,6 +38,7 @@ import org.apache.hadoop.hive.common.metrics.common.MetricsConstant;
 import org.apache.hadoop.hive.conf.HiveConf;
 import org.apache.hadoop.hive.ql.Context;
 import org.apache.hadoop.hive.ql.DriverContext;
+import org.apache.hadoop.hive.ql.MapRedStats;
 import org.apache.hadoop.hive.ql.exec.Operator;
 import org.apache.hadoop.hive.ql.exec.SerializationUtilities;
 import org.apache.hadoop.hive.ql.exec.Utilities;
@@ -49,6 +50,8 @@ import org.apache.hadoop.hive.ql.session.SessionState;
 import org.apache.hadoop.hive.ql.session.SessionState.ResourceType;
 import org.apache.hadoop.hive.shims.ShimLoader;
 import org.apache.hive.common.util.StreamPrinter;
+import org.apache.hadoop.mapred.RunningJob;
+import org.json.JSONException;
 
 /**
  * Extension of ExecDriver:
@@ -496,6 +499,19 @@ public class MapRedTask extends ExecDriver implements Serializable {
     return null;
   }
 
+  public void updateWebUiStats(MapRedStats mapRedStats, RunningJob rj) {
+    if (queryDisplay != null &&
+        conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_STATS) &&
+        conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH) &&
+        conf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT)) {
+      try {
+        queryDisplay.updateTaskStatistics(mapRedStats, rj, getId());
+      } catch (IOException | JSONException e) {
+        LOG.error(org.apache.hadoop.util.StringUtils.stringifyException(e), e);
+      }
+    }
+  }
+
   @Override
   public void shutdown() {
     super.shutdown();

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
----------------------------------------------------------------------
diff --git a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
index f04d655..fd3fbc1 100644
--- a/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
+++ b/service/src/jamon/org/apache/hive/tmpl/QueryProfileTmpl.jamon
@@ -26,6 +26,10 @@ org.apache.hadoop.hive.ql.QueryDisplay;
 org.apache.hadoop.hive.ql.QueryInfo;
 org.apache.hadoop.hive.conf.HiveConf;
 </%import>
+<%class>
+private boolean showGraph = false;
+private boolean showStats = false;
+</%class>
 <!--[if IE]>
 <!DOCTYPE html>
 <![endif]-->
@@ -40,6 +44,22 @@ org.apache.hadoop.hive.conf.HiveConf;
     <link href="/static/css/bootstrap.min.css" rel="stylesheet">
     <link href="/static/css/bootstrap-theme.min.css" rel="stylesheet">
     <link href="/static/css/hive.css" rel="stylesheet">
+
+    <script src="/static/js/jquery.min.js" type="text/javascript"></script>
+    <script src="/static/js/bootstrap.min.js" type="text/javascript"></script>
+    <script src="/static/js/tab.js" type="text/javascript"></script>
+    <%if hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_GRAPH) &&
+      hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT) %>
+      <%java showGraph = true; %>
+      <%if hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_SHOW_STATS) %>
+        <%java showStats = true; %>
+      </%if>
+    </%if>
+    <%if showGraph %>
+      <script type="text/javascript" src="/static/js/vis.min.js"></script>
+      <link href="/static/css/query-plan-graph.css" rel="stylesheet">
+      <script src="/static/js/query-plan-graph.js"></script>
+    </%if>
   </head>
 
   <body>
@@ -204,18 +224,72 @@ org.apache.hadoop.hive.conf.HiveConf;
     <div class="panel panel-default">
       <div class="panel-heading">Explain plan</div>
       <div class="panel-body">
-        <%if hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT) %>
-          <pre>
-          <% queryInfo.getQueryDisplay() == null ? "Unknown" : queryInfo.getQueryDisplay().getExplainPlan() %>
-          </pre>
+        <%if showGraph %>
+          <%if queryInfo.getQueryDisplay() != null &&
+                queryInfo.getQueryDisplay().getExplainPlan() != null &&
+                !queryInfo.getQueryDisplay().getExplainPlan().equals("UNKNOWN")%>
+            <div id='plan-visualization'></div>
+            <%if showStats %>
+              <div class="row">
+                <div class="col-md-6">
+                  <pre id='stage-info'></pre>
+                </div>
+                <div class="col-md-6">
+                  <pre id='statistics-info'><div id='statistics-info-head'></div><div id='statistics-info-body'></div></pre>
+                </div>
+              </div>
+            <%else>
+              <pre id='stage-info'></pre>
+            </%if>
+            <script type="text/javascript">
+              var jsonPlan = <% queryInfo.getQueryDisplay().getExplainPlan() %>
+              var jsonStatuses = {};
+              var jsonStatistics = {};
+              var jsonLogs = {};
+              <%if queryInfo.getOperationLogLocation() != null %>
+                jsonLogs["Operation Log - will be deleted after query completes"] = "<% queryInfo.getOperationLogLocation() %>"
+              </%if>
+              <%if queryInfo.getQueryDisplay().getFullLogLocation() != null %>
+                jsonLogs["Full Log"] = "<% queryInfo.getQueryDisplay().getFullLogLocation() %>"
+              </%if>
+              <%for QueryDisplay.TaskDisplay taskDisplay : queryInfo.getQueryDisplay().getTaskDisplays() %>
+                jsonStatuses["<% taskDisplay.getTaskId() %>"] = "<% taskDisplay.getStatus() %>";
+                <%if showStats && taskDisplay.getStatsJsonString() != null %>
+                  jsonStatistics["<% taskDisplay.getTaskId() %>"] = <% taskDisplay.getStatsJsonString() %>;
+                </%if>
+              </%for>
+              const MAP_PROGRESS = '<% QueryDisplay.TaskDisplay.MAP_PROGRESS %>';
+              const REDUCE_PROGRESS = '<% QueryDisplay.TaskDisplay.REDUCE_PROGRESS %>';
+              network = visualizeJsonPlan('plan-visualization', 'stage-info', 'statistics-info', 'statistics-info-head',
+                'statistics-info-body', jsonPlan, jsonStatuses, jsonStatistics, jsonLogs);
+            </script>
+          <%else>
+            <pre>Query information not available. The query may have failed, or the plan size may exceed the value of hive.server2.webui.max.graph.size.</pre>
+          </%if>
         <%else>
-          <pre>
-          Set configuration hive.server2.webui.explain.output to true to view future query plans
-          </pre>
+          <%if hiveConf.getBoolVar(HiveConf.ConfVars.HIVE_SERVER2_WEBUI_EXPLAIN_OUTPUT) %>
+            <pre>
+            <% queryInfo.getQueryDisplay() == null ? "Unknown" : queryInfo.getQueryDisplay().getExplainPlan() %>
+            </pre>
+          <%else>
+            <pre>
+            Set configuration hive.server2.webui.explain.output to true to view future query plans
+            </pre>
+          </%if>
         </%if>
       </div>
     </div>
 </%def>
+<%if showGraph %>
+  <script type="text/javascript">
+    // refit the network to center the graph when switching to the "Query Plan" tab
+    $('a[data-toggle="tab"]').on('shown.bs.tab', function (e) {
+      if (e.target.toString().includes("queryPlan") && typeof network !== 'undefined') {
+        network.fit();
+      }
+    });
+  </script>
+</%if>
 
 
 <%def perfLogging>
@@ -301,8 +375,5 @@ org.apache.hadoop.hive.conf.HiveConf;
 
 </div>
 </div>
-<script src="/static/js/jquery.min.js" type="text/javascript"></script>
-<script src="/static/js/bootstrap.min.js" type="text/javascript"></script>
-<script src="/static/js/tab.js" type="text/javascript"></script>
 </body>
 </html>

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
----------------------------------------------------------------------
diff --git a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
index 0e6bd4d..01c9249 100644
--- a/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
+++ b/service/src/java/org/apache/hive/service/cli/operation/SQLOperation.java
@@ -184,6 +184,9 @@ public class SQLOperation extends ExecuteStatementOperation {
       }
 
       queryInfo.setQueryDisplay(driver.getQueryDisplay());
+      if (operationLog != null) {
+        queryInfo.setOperationLogLocation(operationLog.toString());
+      }
 
       // set the operation handle information in Driver, so that thrift API users
       // can use the operation handle they receive, to lookup query information in

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/service/src/resources/hive-webapps/static/css/query-plan-graph.css
----------------------------------------------------------------------
diff --git a/service/src/resources/hive-webapps/static/css/query-plan-graph.css b/service/src/resources/hive-webapps/static/css/query-plan-graph.css
new file mode 100644
index 0000000..d4d8c4b
--- /dev/null
+++ b/service/src/resources/hive-webapps/static/css/query-plan-graph.css
@@ -0,0 +1,22 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#plan-visualization {
+  width: 100%;
+  min-height: 300px;
+  border: 1px solid grey;
+  }
\ No newline at end of file

http://git-wip-us.apache.org/repos/asf/hive/blob/1cfe4f91/service/src/resources/hive-webapps/static/js/query-plan-graph.js
----------------------------------------------------------------------
diff --git a/service/src/resources/hive-webapps/static/js/query-plan-graph.js b/service/src/resources/hive-webapps/static/js/query-plan-graph.js
new file mode 100644
index 0000000..ac1d1a3
--- /dev/null
+++ b/service/src/resources/hive-webapps/static/js/query-plan-graph.js
@@ -0,0 +1,533 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+/**
+* query-plan-graph.js
+*
+* Displays a visualization of the query plan
+*/
+
+const DEFAULT_TEXT_INDENT = 2;
+const MAX_ORDER_TRIES = 100;
+
+const SUCCESS = "Success";
+const RUNNING = "Running";
+const FAILED = "Failure";
+
+const PROGRESS_ELEMENT = "progress-bar-element";
+const LONGEST_PATH_FROM_ROOT = "longestPathFromRoot";
+const NODE_ID = "nodeId";
+
+const BLUE = {
+  border:'#2B7CE9',
+  background : '#D2E5FF',
+  highlight : {
+  border : '#2B7CE9',
+  background : '#D2E5FF'
+  }
+}
+const GREEN = {
+  border : '#009900',
+  background : '#99E699',
+  highlight : {
+  border : '#009900',
+    background : '#99E699'
+  }
+}
+const PINK = 'pink';
+const LIGHT_GRAY = 'rgba(200,200,200,0.5)';
+
+
+/**
+* Displays query plan as a graph as well as information about a selected stage.
+*/
+function visualizeJsonPlan(displayGraphElement, displayStagePlanElement, displayStatisticsElement,
+  displayStatisticsElementHead, displayStatisticsElementBody, jsonPlan, jsonStatuses, jsonStats,
+  jsonLogs, displayInformationTextIndent = DEFAULT_TEXT_INDENT) {
+
+  setUpInstructions(displayStagePlanElement, displayStatisticsElementHead);
+  networkData = getNodesAndEdges(jsonPlan["STAGE DEPENDENCIES"]);
+  network = createNetwork(networkData.nodes, networkData.edges, displayGraphElement);
+  network.on('click', function (params) {
+    displayStagePlan(params, displayStagePlanElement, jsonPlan, displayInformationTextIndent);
+    if (document.getElementById(displayStatisticsElement) != null) {
+      displayStatistics(params, displayStatisticsElement, displayStatisticsElementHead,
+        displayStatisticsElementBody, jsonStats, jsonStatuses, jsonLogs,
+        displayInformationTextIndent);
+    }
+  });
+  colorTasks(networkData.edges, networkData.nodes, jsonStatuses)
+  return network;
+}
+
+
+function setUpInstructions(displayStagePlanElement, displayStatisticsElementHead) {
+  document.getElementById(displayStagePlanElement).innerHTML =
+    "Click on a stage to view its plan";
+  if (document.getElementById(displayStatisticsElementHead) != null) {
+    document.getElementById(displayStatisticsElementHead).innerHTML =
+      "Click on a colored-in stage to view stats";
+  }
+}
+
+function colorTasks(edges, nodes, jsonStatuses) {
+  grayFilteredOutTasks(edges, nodes, jsonStatuses);
+  showSuccessfulTasks(edges, nodes, jsonStatuses);
+  showRunningTasks(nodes, jsonStatuses);
+  showFailedTasks(nodes, jsonStatuses);
+  }
+
+/**
+* Set color of node or edge
+*/
+function setColor(dataSet, itemToChange, newColor) {
+  itemToChange.color = newColor;
+  dataSet.update(itemToChange);
+}
+
+
+/**
+* Colors all edges connected to the node.
+*/
+function setAllNodeEdgesColor(edgesDataSet, nodeId, newColor) {
+  var theseEdges = edgesDataSet.get({
+    filter: function(item) {
+      return (item.to == nodeId || item.from == nodeId);
+    }});
+  for (var i = theseEdges.length - 1; i >= 0; i--) {
+    setColor(edgesDataSet, theseEdges[i], newColor);
+  }
+}
+
+
+/**
+* If two nodes are the same color, change the color of their connecting edge to match
+*/
+function matchAllEdgeColors(nodesDataSet, edgesDataSet) {
+  for (node in nodesDataSet.getIds()) {
+      nodeId = nodesDataSet.getIds()[node];
+    var toEdges = edgesDataSet.get({
+      filter: function (item) {
+        return item.to == nodeId;
+      }});
+    for (var i = toEdges.length - 1; i >= 0; i--) {
+      if (nodesDataSet.get(nodeId).color == nodesDataSet.get(parseInt(toEdges[i].from)).color) {
+        setColor(edgesDataSet, toEdges[i], nodesDataSet.get(nodeId).color.border);
+      }
+    }
+  }
+}
+
+
+/**
+* If a task is not filtered out by conditional statements, it will be in jsonStatuses.
+* Set all other tasks' node & edge colors to light gray
+*/
+function grayFilteredOutTasks(edgesDataSet, nodesDataSet, jsonStatuses) {
+
+  for (node in nodesDataSet.getIds()) {
+    nodeId = nodesDataSet.getIds()[node];
+    var taskId = "Stage-" + nodeId;
+    var otherTaskId = nodesDataSet.get(nodeId).id;
+    if (!Object.keys(jsonStatuses).includes(taskId)) {
+      setColor(nodesDataSet, nodesDataSet.get(nodeId), LIGHT_GRAY);
+      setAllNodeEdgesColor(edgesDataSet, nodeId, LIGHT_GRAY);
+    }
+  }
+}
+
+
+/**
+* If a task is successful, color it green!
+*/
+function showSuccessfulTasks(edgesDataSet, nodesDataSet, jsonStatuses) {
+  changeNodeColorByStatus(nodesDataSet, jsonStatuses, SUCCESS, GREEN)
+  matchAllEdgeColors(nodesDataSet, edgesDataSet);
+}
+
+function showRunningTasks(nodesDataSet, jsonStatuses) {
+  changeNodeColorByStatus(nodesDataSet, jsonStatuses, RUNNING, BLUE)
+}
+
+function showFailedTasks(nodesDataSet, jsonStatuses) {
+  changeNodeColorByStatus(nodesDataSet, jsonStatuses, FAILED, PINK)
+}
+
+function changeNodeColorByStatus(nodesDataSet, jsonStatuses, statusString, color) {
+  for (node in nodesDataSet.getIds()) {
+    nodeId = nodesDataSet.getIds()[node];
+    var taskId = "Stage-" + nodeId;
+    if (Object.keys(jsonStatuses).includes(taskId)) {
+      if (jsonStatuses[taskId].search(statusString) != -1) {
+        setColor(nodesDataSet, nodesDataSet.get(nodeId), color);
+      }
+    }
+  }
+}
+
+
+/**
+* Builds node and edge data for vis.js from the query plan's stage dependency information
+* @param {Object} stageDependencies - json object containing node and edge information
+* returns {nodes, edges} - javascript arrayObjects containing node, edge info readable by vis.js
+*/
+function getNodesAndEdges(stageDependencies) {
+  var nodes = [];
+  var edges = [];
+  var paths = [];
+  var childNodes = [];
+  var nodeIndex = 0;
+  var edgeIndex = 0;
+  var newNode = {};
+  var newEdge = {};
+  var nodeId = 0;
+  var newLabel = "";
+  for (stage in stageDependencies) {
+    newNode = {};
+    nodeId = getStageNumber(stage);
+    newNode['id'] = nodeId;
+    newLabel = nodeId + " - " + stageDependencies[stage]["TASK TYPE"];
+    if (stageDependencies[stage]["ROOT STAGE"] == "TRUE") {
+      newLabel += " - ROOT";
+      //create new path
+      paths.push([nodeId]);
+    }
+    else {
+      childNodes.push(nodeId);
+    }
+    newNode['label'] = newLabel;
+    if (stageDependencies[stage]["CONDITIONAL CHILD TASKS"] != null) {
+      newNode['shape'] = 'text';
+    }
+    nodes[nodeIndex] = newNode;
+    edgeIndex = linkStages("DEPENDENT STAGES", stageDependencies, stage, nodeId, edgeIndex, edges);
+    edgeIndex = linkStages("CONDITIONAL CHILD TASKS", stageDependencies, stage, nodeId, edgeIndex,
+      edges, true);
+
+    nodeIndex++;
+  }
+
+  for (var i = paths.length - 1; i >= 0; i--) {
+    for (ndx in childNodes) {
+      paths[i].push(childNodes[ndx]);
+    }
+  }
+  assignNodeLevel(paths, nodes, edges);
+
+  var nodesDataSet = new vis.DataSet(nodes);
+  var edgesDataSet = new vis.DataSet(edges);
+  return {
+    nodes: nodesDataSet,
+    edges: edgesDataSet
+  }
+}
+
+
+/**
+* Make each node's level in the hierarchy the longest path from any root
+*/
+function assignNodeLevel(paths, nodes, edges) {
+
+  orderPathsByEdgeDirection(paths, edges);
+  for (pathNdx in paths) {
+    prepareMap(paths[pathNdx]);
+    assignLongestPathFromRoot(paths[pathNdx], edges)
+  }
+  assignLevelByLongestPathFromAllRoots(paths, nodes);
+}
+
+
+function orderPathsByEdgeDirection(paths, edges) {
+  for (pathNdx in paths) {
+    var currentPath = paths[pathNdx];
+    var orderedPath = []
+    var listOfEdges = edges.slice(0);
+    for (var edgeNdx = 0; edgeNdx < listOfEdges.length; edgeNdx++) {
+      if (!(currentPath.includes(listOfEdges[edgeNdx].to) && currentPath.includes(listOfEdges[edgeNdx].from))) {
+        listOfEdges.splice(edgeNdx, 1);
+        edgeNdx--;
+      }
+    }
+    var count = 0;
+    while (currentPath.length != 0 && count < MAX_ORDER_TRIES) {
+      for (var nodeNdx in currentPath) {
+        var node = currentPath[nodeNdx];
+        var edgesIn = 0;
+        for (edgeNdx in listOfEdges) {
+          if (listOfEdges[edgeNdx].to == node) {
+            edgesIn++;
+            break;
+          }
+        }
+        if (edgesIn == 0) {
+          orderedPath.push(node);
+          currentPath.splice(nodeNdx, 1);
+          for (var edgeNdx = 0; edgeNdx < listOfEdges.length; edgeNdx++) {
+            if (listOfEdges[edgeNdx].from == node || listOfEdges[edgeNdx].to == node) {
+              listOfEdges.splice(edgeNdx, 1);
+              edgeNdx--;
+            }
+          }
+        }
+      }
+      count++;
+    }
+    paths[pathNdx] = orderedPath;
+  }
+}
+
+
+function prepareMap(currentPath) {
+  for (currentPathNdx in currentPath) {
+    nodeId = currentPath[currentPathNdx];
+    currentPath[currentPathNdx] = {};
+    currentPath[currentPathNdx][NODE_ID] = nodeId;
+    if (currentPathNdx == 0) {
+      currentPath[currentPathNdx][LONGEST_PATH_FROM_ROOT] = 0;
+    }
+    else {
+      currentPath[currentPathNdx][LONGEST_PATH_FROM_ROOT] = -Infinity;
+    }
+  }
+}
+
+
+function assignLongestPathFromRoot(currentPath, edges) {
+  for (var nodeIndex = 1; nodeIndex <= currentPath.length - 1; nodeIndex++) { //skips index 0, root
+    var node = currentPath[nodeIndex];
+    var nodeId = node[NODE_ID];
+    for (var prevNodeIndex = 0; prevNodeIndex < nodeIndex; prevNodeIndex++) {
+      var prevNode = currentPath[prevNodeIndex];
+      for (edgeNdx in edges) {
+        if (edges[edgeNdx].to == nodeId && edges[edgeNdx].from == prevNode.nodeId &&
+          currentPath[nodeIndex][LONGEST_PATH_FROM_ROOT] <= prevNode[LONGEST_PATH_FROM_ROOT]) {
+          currentPath[nodeIndex][LONGEST_PATH_FROM_ROOT] = prevNode[LONGEST_PATH_FROM_ROOT] + 1;
+        }
+      }
+    }
+  }
+}
+
+
+function assignLevelByLongestPathFromAllRoots(paths, nodes) {
+  for (nodeNdx in nodes) {
+    var nodeId = nodes[nodeNdx].id;
+    var longestPath = 0;
+    for (pathNdx in paths) {
+      currentPath = paths[pathNdx];
+      for (pathNodeNdx in currentPath) {
+        if (currentPath[pathNodeNdx][NODE_ID] == nodeId &&
+          currentPath[pathNodeNdx][LONGEST_PATH_FROM_ROOT] > longestPath) {
+          longestPath = currentPath[pathNodeNdx][LONGEST_PATH_FROM_ROOT];
+        }
+      }
+    }
+    nodes[nodeNdx]['level'] = longestPath;
+  }
+}
+
+/**
+* Creates a vis.js hierarchical network
+* @param {nodes} - vis DataSet, node info
+* @param {edges} - vis DataSet, edge info
+* @param {documentElement} - where to place the network
+*/
+function createNetwork(nodes, edges, documentElement) {
+  var data = {
+    nodes: nodes,
+    edges: edges
+  };
+  var container = document.getElementById(documentElement);
+  var options = {
+    layout: {
+      hierarchical: {
+        direction: 'LR',
+        sortMethod: 'directed',
+        levelSeparation: 150,
+        parentCentralization: true
+      }
+    },
+    edges: {
+      smooth: true,
+      arrows: {to : true }
+    },
+    physics: {
+      hierarchicalRepulsion: {
+        nodeDistance: 150
+      }
+    },
+    interaction: {
+      zoomView: false
+    }
+  };
+  var network = new vis.Network(container, data, options);
+  return network;
+}
+
+
+/**
+* Removes all non-number characters from string, casts to integer
+* @param {string} - string to mine the number from
+* returns {int} - the integer needed from the string
+*/
+function getStageNumber(string) {
+  return parseInt(string.replace( /^\D+/g, ''));
+}
+
+function prettifyJsonString(jsonString) {
+  return jsonString.replace(/{/g, "").replace(/}/g, "").replace(/,/g, "");
+}
+
+
+function addDashedEdge(parent, child, edgeIndex, edges) {
+  addEdge(parent, child, edgeIndex, edges);
+  edges[edgeIndex]['dashes'] = 'true';
+}
+
+
+function addEdge(parent, child, edgeIndex, edges) {
+  newEdge = {};
+  newEdge['from'] = parent;
+  newEdge['to'] = child;
+  newEdge['color'] = 'gray';
+  edges[edgeIndex] = newEdge;
+}
+
+
+/**
+* Add edge information
+* returns {int} edgeIndex - where we are in the list of edges
+*/
+function linkStages(linkType, data, stage, nodeId, edgeIndex, edges, dashes=false) {
+  if (data[stage][linkType] != null) {
+    linkedStages = data[stage][linkType].split(",");
+    for (index in linkedStages) {
+      if (dashes == true) {
+        addDashedEdge(nodeId, getStageNumber(linkedStages[index]), edgeIndex, edges);
+      }
+      else {
+        addEdge(getStageNumber(linkedStages[index]), nodeId, edgeIndex, edges);
+      }
+      edgeIndex ++
+    }
+  }
+  return edgeIndex;
+}
+
+
+function displayStagePlan(params, displayStagePlanElement, jsonPlan, textIndent) {
+  nodeId = params.nodes;
+  stageName = "Stage-" + nodeId;
+  if (nodeId != "") {
+    document.getElementById(displayStagePlanElement).innerHTML =
+      'Stage ' + nodeId + " plan:\n" +
+      prettifyJsonString(JSON.stringify(jsonPlan['STAGE PLANS'][stageName], null, textIndent));
+  }
+  //show nothing if no node is selected
+  else {
+    document.getElementById(displayStagePlanElement).innerHTML =
+      "Click on a stage to view its plan";
+  }
+}
+
+
+function displayStatistics(params, displayStatisticsElement, displayStatisticsElementHead,
+  displayStatisticsElementBody, jsonStats, jsonStatuses, jsonLogs, textIndent) {
+  if (document.getElementById(PROGRESS_ELEMENT)) {
+    document.getElementById(PROGRESS_ELEMENT).remove();
+  }
+  nodeId = params.nodes;
+  stageName = "Stage-" + nodeId;
+  if (nodeId != "" && jsonStatuses[stageName] != null) {
+    if (jsonStats[stageName] != null) {
+
+      document.getElementById(displayStatisticsElementHead).innerHTML =
+        "Stage " + nodeId + " statistics:\n" +
+        "Status: " + jsonStatuses[stageName] + "\n" +
+        "Logs: " + prettifyJsonString(JSON.stringify(jsonLogs, null, textIndent)) + "\n\n" +
+        "MapReduce job progress:\n";
+      document.getElementById(displayStatisticsElementBody).innerHTML =
+        prettifyJsonString(JSON.stringify(jsonStats[stageName], null, textIndent));
+
+      if (jsonStats[stageName][MAP_PROGRESS] != null) {
+        var mapProgress = jsonStats[stageName][MAP_PROGRESS];
+      }
+      if (jsonStats[stageName][REDUCE_PROGRESS] != null) {
+        var reduceProgress = jsonStats[stageName][REDUCE_PROGRESS];
+      }
+      var progressBar = getMainProgressBar(mapProgress, reduceProgress, PROGRESS_ELEMENT);
+      if (progressBar != null) {
+        document.getElementById(displayStatisticsElement).insertBefore(
+          progressBar, document.getElementById(displayStatisticsElementBody));
+      }
+    }
+    else { // stage without statistics
+      document.getElementById(displayStatisticsElementHead).innerHTML =
+        "Stage " + nodeId + " statistics:\n" +
+        "Status: " + jsonStatuses[stageName]+ "\n" +
+        "Logs: " + prettifyJsonString(JSON.stringify(jsonLogs, null, textIndent));
+      document.getElementById(displayStatisticsElementBody).innerHTML = '';
+    }
+  }
+  //show nothing if no node is selected or selected node isn't executed
+  else {
+    document.getElementById(displayStatisticsElementHead).innerHTML =
+      "Click on a colored-in stage to view stats";
+    document.getElementById(displayStatisticsElementBody).innerHTML = '';
+  }
+}
+
+
+function getMainProgressBar(mapProgress, reduceProgress, elementId) {
+  var progressBar = document.createElement("div");
+  progressBar.className = "progress";
+  progressBar.id = elementId;
+  var numChildProgressBars = 2;
+  if (mapProgress == null || reduceProgress == null) {
+    numChildProgressBars = 1;
+  }
+  if (mapProgress != null) {
+    progressBar.appendChild(getSubProgressBar("map", mapProgress, progressBar,
+        numChildProgressBars));
+  }
+  if (reduceProgress != null) {
+    progressBar.appendChild(getSubProgressBar("reduce", reduceProgress, progressBar,
+        numChildProgressBars, 'green'));
+  }
+  if (mapProgress == null && reduceProgress == null) {
+    progressBar = null;
+  }
+  return progressBar;
+}
+
+
+function getSubProgressBar(type, progress, mainProgressBar, numChildProgressBars, color) {
+  var new_progress_bar = document.createElement("div");
+  new_progress_bar.id = "progress-" + type;
+  var className = "progress-bar";
+  if (color == "green") {
+    className += " progress-bar-success";
+  }
+  new_progress_bar.className = className;
+  new_progress_bar.role = "progressbar";
+  new_progress_bar['aria-valuenow'] = "0";
+  new_progress_bar['aria-valuemin'] = progress / numChildProgressBars;
+  new_progress_bar['aria-valuemax'] = "100";
+  new_progress_bar.style = "width: " + progress / numChildProgressBars + "%; min-width: 2em;";
+  new_progress_bar.innerHTML = progress + "% " + type;
+  return new_progress_bar;
+}
\ No newline at end of file