Posted to commits@hive.apache.org by ai...@apache.org on 2018/06/08 19:02:44 UTC

hive git commit: HIVE-19053: RemoteSparkJobStatus#getSparkJobInfo treats all exceptions as timeout errors (Aihua Xu, reviewed by Sahil Takiar)

Repository: hive
Updated Branches:
  refs/heads/master 148635cb8 -> 913baef82


HIVE-19053: RemoteSparkJobStatus#getSparkJobInfo treats all exceptions as timeout errors (Aihua Xu, reviewed by Sahil Takiar)
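For context: the patch narrows a catch-all around Future.get() in RemoteSparkJobStatus#getSparkJobInfo. Previously every exception was reported as a timeout; after this change, TimeoutException, InterruptedException, and ExecutionException each map to their own ErrorMsg. A minimal standalone sketch of that pattern (illustrative only, not Hive code; the class, executor, and messages below are made up for the demo):

    import java.util.concurrent.Callable;
    import java.util.concurrent.ExecutionException;
    import java.util.concurrent.ExecutorService;
    import java.util.concurrent.Executors;
    import java.util.concurrent.Future;
    import java.util.concurrent.TimeUnit;
    import java.util.concurrent.TimeoutException;

    public class FutureGetDemo {
      public static void main(String[] args) {
        ExecutorService pool = Executors.newSingleThreadExecutor();
        // Stand-in for the remote "get job info" call; it fails outright.
        Callable<String> job = () -> {
          throw new IllegalStateException("stage 3 failed on the cluster");
        };
        Future<String> f = pool.submit(job);
        try {
          System.out.println(f.get(5, TimeUnit.SECONDS));
        } catch (TimeoutException e) {
          // Only a genuine timeout lands here now.
          System.err.println("timed out waiting for job info");
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
          System.err.println("interrupted while waiting for job info");
        } catch (ExecutionException e) {
          // The remote job itself failed; report its cause, not a timeout.
          System.err.println("job failed: " + e.getCause().getMessage());
        } finally {
          pool.shutdownNow();
        }
      }
    }

Running the sketch prints "job failed: stage 3 failed on the cluster" rather than a misleading timeout message, which is the behavioral point of the fix.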


Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/913baef8
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/913baef8
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/913baef8

Branch: refs/heads/master
Commit: 913baef8279f6682f7b209a1f4e7c445a85f17e9
Parents: 148635c
Author: Aihua Xu <ax...@cloudera.com>
Authored: Tue Jun 5 13:47:12 2018 -0700
Committer: Aihua Xu <ai...@apache.org>
Committed: Fri Jun 8 11:58:34 2018 -0700

----------------------------------------------------------------------
 ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java    |  2 ++
 .../exec/spark/status/impl/RemoteSparkJobStatus.java   | 13 ++++++++++---
 2 files changed, 12 insertions(+), 3 deletions(-)
----------------------------------------------------------------------


http://git-wip-us.apache.org/repos/asf/hive/blob/913baef8/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
index 8baf309..bc2cffa 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/ErrorMsg.java
@@ -585,6 +585,8 @@ public enum ErrorMsg {
       "Cannot create Spark client on a closed session {0}", true),
 
   SPARK_JOB_INTERRUPTED(30044, "Spark job was interrupted while executing"),
+  SPARK_GET_JOB_INFO_INTERRUPTED(30045, "Spark job was interrupted while getting job info"),
+  SPARK_GET_JOB_INFO_EXECUTIONERROR(30046, "Spark job failed in execution while getting job info due to exception {0}"),
 
   //========================== 40000 range starts here ========================//
 

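The two new codes continue the 30000-range Spark entries (30044 is the existing SPARK_JOB_INTERRUPTED). The {0} in SPARK_GET_JOB_INFO_EXECUTIONERROR is a placeholder filled with the root-cause message at throw time. For illustration only, the substitution behaves like java.text.MessageFormat; Hive's actual ErrorMsg plumbing may differ in detail:

    import java.text.MessageFormat;

    public class ErrorTemplateDemo {
      public static void main(String[] args) {
        // Hypothetical stand-in for filling the enum's "{0}" slot
        // with the root-cause message.
        String template =
            "Spark job failed in execution while getting job info due to exception {0}";
        System.out.println(MessageFormat.format(
            template, "java.io.FileNotFoundException: hdfs://tmp/part-0"));
      }
    }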
http://git-wip-us.apache.org/repos/asf/hive/blob/913baef8/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index d2e28b0..832832b 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -25,6 +25,8 @@ import org.apache.hadoop.hive.ql.exec.spark.status.SparkStage;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
+import com.google.common.base.Throwables;
+
 import org.apache.hadoop.hive.ql.ErrorMsg;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatistics;
 import org.apache.hadoop.hive.ql.exec.spark.Statistic.SparkStatisticsBuilder;
@@ -37,7 +39,6 @@ import org.apache.hive.spark.client.JobContext;
 import org.apache.hive.spark.client.JobHandle;
 import org.apache.hive.spark.client.SparkClient;
 import org.apache.hive.spark.counter.SparkCounters;
-
 import org.apache.spark.JobExecutionStatus;
 import org.apache.spark.SparkJobInfo;
 import org.apache.spark.SparkStageInfo;
@@ -47,8 +48,10 @@ import java.io.Serializable;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
+import java.util.concurrent.ExecutionException;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
+import java.util.concurrent.TimeoutException;
 
 /**
  * Used with remote spark client.
@@ -198,10 +201,14 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
         new GetJobInfoJob(jobHandle.getClientJobId(), sparkJobId));
     try {
       return getJobInfo.get(sparkClientTimeoutInSeconds, TimeUnit.SECONDS);
-    } catch (Exception e) {
-      LOG.warn("Failed to get job info.", e);
+    } catch (TimeoutException e) {
       throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_TIMEOUT,
           Long.toString(sparkClientTimeoutInSeconds));
+    } catch (InterruptedException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_INTERRUPTED);
+    } catch (ExecutionException e) {
+      throw new HiveException(e, ErrorMsg.SPARK_GET_JOB_INFO_EXECUTIONERROR,
+          Throwables.getRootCause(e).getMessage());
     }
   }
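
The ExecutionException branch surfaces the original failure rather than the wrapper by calling Guava's Throwables.getRootCause, which walks the getCause() chain to its end. A small self-contained demo of that unwrapping (assumes Guava on the classpath, as the patched class already does; the exception chain is invented for the demo):

    import com.google.common.base.Throwables;
    import java.util.concurrent.ExecutionException;

    public class RootCauseDemo {
      public static void main(String[] args) {
        Exception root = new IllegalStateException("executor lost");
        Exception wrapped = new RuntimeException("remote call failed",
            new ExecutionException("job aborted", root));
        // Prints "executor lost": getRootCause skips both wrappers.
        System.out.println(Throwables.getRootCause(wrapped).getMessage());
      }
    }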