Posted to commits@hive.apache.org by su...@apache.org on 2016/12/05 19:18:14 UTC

hive git commit: HIVE-15301: Expose SparkStatistics information in SparkTask (Zhihai Xu, reviewed by Xuefu Zhang)

Repository: hive
Updated Branches:
  refs/heads/master a40122323 -> 7089ac7b6


HIVE-15301: Expose SparkStatistics information in SparkTask (Zhihai Xu, reviewed by Xuefu Zhang)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/7089ac7b
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/7089ac7b
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/7089ac7b

Branch: refs/heads/master
Commit: 7089ac7b68f65877cf625821b712b873315674e4
Parents: a401223
Author: Zhihai Xu <zh...@gmail.com>
Authored: Mon Dec 5 11:17:36 2016 -0800
Committer: Chao Sun <su...@apache.org>
Committed: Mon Dec 5 11:17:36 2016 -0800

----------------------------------------------------------------------
 .../apache/hadoop/hive/ql/exec/spark/SparkTask.java    | 13 ++++++++++++-
 1 file changed, 12 insertions(+), 1 deletion(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/7089ac7b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index b23129b..87e96a9 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -72,6 +72,8 @@ public class SparkTask extends Task<SparkWork> {
   private static final LogHelper console = new LogHelper(LOG);
   private final PerfLogger perfLogger = SessionState.getPerfLogger();
   private static final long serialVersionUID = 1L;
+  private transient String sparkJobID;
+  private transient SparkStatistics sparkStatistics;
 
   @Override
   public void initialize(QueryState queryState, QueryPlan queryPlan, DriverContext driverContext,
@@ -98,11 +100,12 @@ public class SparkTask extends Task<SparkWork> {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_SUBMIT_JOB);
 
       addToHistory(jobRef);
+      sparkJobID = jobRef.getJobId();
       this.jobID = jobRef.getSparkJobStatus().getAppID();
       rc = jobRef.monitorJob();
       SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
       if (rc == 0) {
-        SparkStatistics sparkStatistics = sparkJobStatus.getSparkStatistics();
+        sparkStatistics = sparkJobStatus.getSparkStatistics();
         if (LOG.isInfoEnabled() && sparkStatistics != null) {
           LOG.info(String.format("=====Spark Job[%s] statistics=====", jobRef.getJobId()));
           logSparkStatistic(sparkStatistics);
@@ -228,6 +231,14 @@ public class SparkTask extends Task<SparkWork> {
     return ((ReduceWork) children.get(0)).getReducer();
   }
 
+  public String getSparkJobID() {
+    return sparkJobID;
+  }
+
+  public SparkStatistics getSparkStatistics() {
+    return sparkStatistics;
+  }
+
   /**
    * Set the number of reducers for the spark work.
    */
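
With this change, code that holds a SparkTask after execution can read the Spark job ID and the collected statistics through the new getSparkJobID() and getSparkStatistics() accessors. Below is a minimal sketch of such a caller, not part of this commit: it assumes the existing iterator-style SparkStatistics/SparkStatisticGroup/SparkStatistic API and the org.apache.hadoop.hive.ql.exec.spark.Statistic package layout, so import paths and method names outside this diff may differ.

  import java.util.Iterator;

  import org.apache.hadoop.hive.ql.exec.spark.SparkTask;
  import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistic;
  import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticGroup;
  import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;

  public class SparkTaskStatsPrinter {

    /** Print the Spark job ID and all statistic groups exposed by a finished SparkTask. */
    public static void printStats(SparkTask task) {
      System.out.println("Spark job ID: " + task.getSparkJobID());

      SparkStatistics stats = task.getSparkStatistics();
      if (stats == null) {
        // Per the diff above, sparkStatistics is only populated when the job succeeds (rc == 0).
        return;
      }
      Iterator<SparkStatisticGroup> groups = stats.getStatisticGroups();
      while (groups.hasNext()) {
        SparkStatisticGroup group = groups.next();
        System.out.println(group.getGroupName());
        Iterator<SparkStatistic> metrics = group.getStatistics();
        while (metrics.hasNext()) {
          SparkStatistic metric = metrics.next();
          System.out.println("\t" + metric.getName() + ": " + metric.getValue());
        }
      }
    }
  }

This mirrors the traversal that SparkTask's own logSparkStatistic() performs when logging statistics, but routes the values to a caller instead of the log, which is the point of exposing the fields.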