Posted to commits@hive.apache.org by su...@apache.org on 2016/11/11 19:36:42 UTC

hive git commit: HIVE-15171: set SparkTask's jobID with application id (Zhihai Xu, reviewed by Chao Sun)

Repository: hive
Updated Branches:
  refs/heads/master 444af2072 -> 623d2921a


HIVE-15171: set SparkTask's jobID with application id (Zhihai Xu, reviewed by Chao Sun)


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/623d2921
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/623d2921
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/623d2921

Branch: refs/heads/master
Commit: 623d2921a920d30df4c29d4fd5fc0f94c336402e
Parents: 444af20
Author: Zhihai Xu <zh...@gmail.com>
Authored: Fri Nov 11 11:36:00 2016 -0800
Committer: Chao Sun <su...@apache.org>
Committed: Fri Nov 11 11:36:00 2016 -0800

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java | 4 ++++
 1 file changed, 4 insertions(+)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/623d2921/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
index 6597a51..b23129b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/SparkTask.java
@@ -98,6 +98,7 @@ public class SparkTask extends Task<SparkWork> {
       perfLogger.PerfLogEnd(CLASS_NAME, PerfLogger.SPARK_SUBMIT_JOB);
 
       addToHistory(jobRef);
+      this.jobID = jobRef.getSparkJobStatus().getAppID();
       rc = jobRef.monitorJob();
       SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
       if (rc == 0) {
@@ -113,6 +114,9 @@ public class SparkTask extends Task<SparkWork> {
         // it's difficult to do it on hive side alone. See HIVE-12650.
         jobRef.cancelJob();
       }
+      if (this.jobID == null) {
+        this.jobID = sparkJobStatus.getAppID();
+      }
       sparkJobStatus.cleanup();
     } catch (Exception e) {
       String msg = "Failed to execute spark task, with exception '" + Utilities.getNameMessage(e) + "'";
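----------------------------------------------------------------------

For context on the change above: the commit records the Spark application
id in the task's jobID field at two points. First, right after the job is
submitted, so the id is available to callers even if monitoring fails
later; second, as a fallback just before cleanup, in case the application
id had not yet been assigned at submission time and the first getAppID()
call returned null. Below is a minimal, self-contained sketch of that
pattern, not the actual Hive source: SparkJobRef, SparkJobStatus,
getAppID(), monitorJob(), cancelJob(), and cleanup() mirror the names in
the diff, while the interface stubs and class scaffolding are illustrative
only.

----------------------------------------------------------------------
// Illustrative stand-ins for Hive's job-status types; the real interfaces
// live under org.apache.hadoop.hive.ql.exec.spark.status and have more
// members than shown here.
interface SparkJobStatus {
  String getAppID();   // Spark/YARN application id; may be null early on
  void cleanup();
}

interface SparkJobRef {
  SparkJobStatus getSparkJobStatus();
  int monitorJob();    // blocks until the job finishes; 0 means success
  boolean cancelJob();
}

class SparkTaskSketch {
  private String jobID;

  int execute(SparkJobRef jobRef) {
    // Capture the application id immediately after submission, mirroring
    // the first hunk of the diff.
    this.jobID = jobRef.getSparkJobStatus().getAppID();
    int rc = jobRef.monitorJob();
    SparkJobStatus sparkJobStatus = jobRef.getSparkJobStatus();
    if (rc != 0) {
      jobRef.cancelJob();
    }
    // Fallback from the second hunk: the id may not have been assigned
    // yet when the job was first submitted.
    if (this.jobID == null) {
      this.jobID = sparkJobStatus.getAppID();
    }
    sparkJobStatus.cleanup();
    return rc;
  }
}
----------------------------------------------------------------------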