Posted to commits@hive.apache.org by li...@apache.org on 2017/02/13 05:33:02 UTC
hive git commit: HIVE-15860: RemoteSparkJobMonitor may hang when RemoteDriver exits abnormally (Rui reviewed by Xuefu)
Repository: hive
Updated Branches:
refs/heads/master c67a6f49d -> 791066178
HIVE-15860: RemoteSparkJobMonitor may hang when RemoteDriver exits abnormally (Rui reviewed by Xuefu)
Project: http://git-wip-us.apache.org/repos/asf/hive/repo
Commit: http://git-wip-us.apache.org/repos/asf/hive/commit/79106617
Tree: http://git-wip-us.apache.org/repos/asf/hive/tree/79106617
Diff: http://git-wip-us.apache.org/repos/asf/hive/diff/79106617
Branch: refs/heads/master
Commit: 7910661781393e8cc0992ab6c135a18fa1a68912
Parents: c67a6f4
Author: Rui Li <li...@apache.org>
Authored: Mon Feb 13 13:32:28 2017 +0800
Committer: Rui Li <sh...@cn.ibm.com>
Committed: Mon Feb 13 13:32:28 2017 +0800
----------------------------------------------------------------------
.../hive/ql/exec/spark/status/RemoteSparkJobMonitor.java | 7 ++++++-
.../hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java | 8 ++++++++
2 files changed, 14 insertions(+), 1 deletion(-)
----------------------------------------------------------------------
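For context on the two hunks below: if the RemoteDriver dies between JobStarted and JobSubmitted, the Spark-side job state stays null and the old monitor loop would keep polling indefinitely. With this patch, Preconditions.checkState throws an IllegalStateException once sparkJobStatus.isRemoteActive() reports the remote context as gone; that exception is caught by the existing catch (Exception e) block in RemoteSparkJobMonitor and surfaces as the "Failed to monitor Job[...]" error instead of a hang. The following is a minimal standalone sketch of that control flow; the loop structure, sleep interval, and helper names are illustrative assumptions, not the actual RemoteSparkJobMonitor code.

import com.google.common.base.Preconditions;

// Standalone sketch only -- not the real monitor class.
public class RemoteMonitorSketch {

  interface RemoteStatus {
    Object getSparkJobState();  // stays null until the remote driver reports JobSubmitted
    boolean isRemoteActive();   // the method added by this patch
  }

  static void monitor(RemoteStatus status) throws InterruptedException {
    while (true) {
      Object sparkJobState = status.getSparkJobState();
      if (sparkJobState == null) {
        // Before HIVE-15860 a null state was simply skipped, so a crashed
        // RemoteDriver left this loop spinning forever. checkState throws
        // IllegalStateException when the remote context is inactive, and the
        // real monitor's catch block turns that into a job failure.
        Preconditions.checkState(status.isRemoteActive(),
            "Remote context becomes inactive.");
      } else {
        return; // real code prints progress and handles SUCCEEDED/FAILED etc.
      }
      Thread.sleep(1000);
    }
  }
}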
http://git-wip-us.apache.org/repos/asf/hive/blob/79106617/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
index ef3d8f8..dd73f3e 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/RemoteSparkJobMonitor.java
@@ -21,6 +21,7 @@ package org.apache.hadoop.hive.ql.exec.spark.status;
import java.util.Arrays;
import java.util.Map;
+import com.google.common.base.Preconditions;
import org.apache.hadoop.hive.conf.HiveConf;
import org.apache.hadoop.hive.ql.exec.Utilities;
import org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobStatus;
@@ -103,6 +104,10 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
printStatus(progressMap, lastProgressMap);
lastProgressMap = progressMap;
+ } else if (sparkJobState == null) {
+ // in case the remote context crashes between JobStarted and JobSubmitted
+ Preconditions.checkState(sparkJobStatus.isRemoteActive(),
+ "Remote context becomes inactive.");
}
break;
case SUCCEEDED:
@@ -150,7 +155,7 @@ public class RemoteSparkJobMonitor extends SparkJobMonitor {
}
} catch (Exception e) {
String msg = " with exception '" + Utilities.getNameMessage(e) + "'";
- msg = "Failed to monitor Job[ " + sparkJobStatus.getJobId() + "]" + msg;
+ msg = "Failed to monitor Job[" + sparkJobStatus.getJobId() + "]" + msg;
// Has to use full name to make sure it does not conflict with
// org.apache.commons.lang.StringUtils
http://git-wip-us.apache.org/repos/asf/hive/blob/79106617/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
----------------------------------------------------------------------
diff --git a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
index 0e3e541..951dbb4 100644
--- a/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
+++ b/ql/src/java/org/apache/hadoop/hive/ql/exec/spark/status/impl/RemoteSparkJobStatus.java
@@ -151,6 +151,14 @@ public class RemoteSparkJobStatus implements SparkJobStatus {
this.error = e;
}
+ /**
+ * Indicates whether the remote context is active. SparkJobMonitor can use this to decide whether
+ * to stop monitoring.
+ */
+ public boolean isRemoteActive() {
+ return sparkClient.isActive();
+ }
+
private SparkJobInfo getSparkJobInfo() throws HiveException {
Integer sparkJobId = jobHandle.getSparkJobIds().size() == 1
? jobHandle.getSparkJobIds().get(0) : null;
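The new isRemoteActive() simply delegates to sparkClient.isActive(), exposing the liveness of the remote Spark context to callers such as RemoteSparkJobMonitor. A rough caller-side sketch follows; the helper name and failure handling are assumptions for illustration, not the monitor's actual code, and it assumes the hive-exec classes are on the classpath with a status built from a live SparkClient and JobHandle.

// Hypothetical helper showing how a monitor-style caller might consume the new method.
static void failFastIfDriverGone(
    org.apache.hadoop.hive.ql.exec.spark.status.impl.RemoteSparkJobStatus status) {
  if (!status.isRemoteActive()) {
    // Remote context (RemoteDriver) has exited; stop monitoring and fail the job
    // instead of polling a state that will never change.
    throw new IllegalStateException("Remote context becomes inactive.");
  }
}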