You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by li...@apache.org on 2015/02/19 02:50:06 UTC
spark git commit: [SPARK-5846] Correctly set job description and pool for SQL jobs
Repository: spark
Updated Branches:
refs/heads/master d12d2ad76 -> e945aa613
[SPARK-5846] Correctly set job description and pool for SQL jobs
marmbrus am I missing something obvious here? I verified that this fixes the problem for me (on 1.2.1) on EC2, but I'm confused about how others wouldn't have noticed this?
Author: Kay Ousterhout <ka...@gmail.com>
Closes #4630 from kayousterhout/SPARK-5846_1.3 and squashes the following commits:
2022ad4 [Kay Ousterhout] [SPARK-5846] Correctly set job description and pool for SQL jobs
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/e945aa61
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/e945aa61
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/e945aa61
Branch: refs/heads/master
Commit: e945aa6139e022d13ac793f46819cfee07b782fc
Parents: d12d2ad
Author: Kay Ousterhout <ka...@gmail.com>
Authored: Thu Feb 19 09:49:34 2015 +0800
Committer: Cheng Lian <li...@databricks.com>
Committed: Thu Feb 19 09:49:34 2015 +0800
----------------------------------------------------------------------
.../org/apache/spark/sql/hive/thriftserver/Shim12.scala | 8 ++++----
.../org/apache/spark/sql/hive/thriftserver/Shim13.scala | 8 ++++----
2 files changed, 8 insertions(+), 8 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/e945aa61/sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala b/sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala
index ea9d61d..13116b4 100644
--- a/sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala
+++ b/sql/hive-thriftserver/v0.12.0/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim12.scala
@@ -185,6 +185,10 @@ private[hive] class SparkExecuteStatementOperation(
def run(): Unit = {
logInfo(s"Running query '$statement'")
setState(OperationState.RUNNING)
+ hiveContext.sparkContext.setJobDescription(statement)
+ sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
+ hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
+ }
try {
result = hiveContext.sql(statement)
logDebug(result.queryExecution.toString())
@@ -194,10 +198,6 @@ private[hive] class SparkExecuteStatementOperation(
logInfo(s"Setting spark.scheduler.pool=$value for future statements in this session.")
case _ =>
}
- hiveContext.sparkContext.setJobDescription(statement)
- sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
- hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
- }
iter = {
val useIncrementalCollect =
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean
http://git-wip-us.apache.org/repos/asf/spark/blob/e945aa61/sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala
----------------------------------------------------------------------
diff --git a/sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala b/sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala
index 71e3954..9b8faef 100644
--- a/sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala
+++ b/sql/hive-thriftserver/v0.13.1/src/main/scala/org/apache/spark/sql/hive/thriftserver/Shim13.scala
@@ -156,6 +156,10 @@ private[hive] class SparkExecuteStatementOperation(
def run(): Unit = {
logInfo(s"Running query '$statement'")
setState(OperationState.RUNNING)
+ hiveContext.sparkContext.setJobDescription(statement)
+ sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
+ hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
+ }
try {
result = hiveContext.sql(statement)
logDebug(result.queryExecution.toString())
@@ -165,10 +169,6 @@ private[hive] class SparkExecuteStatementOperation(
logInfo(s"Setting spark.scheduler.pool=$value for future statements in this session.")
case _ =>
}
- hiveContext.sparkContext.setJobDescription(statement)
- sessionToActivePool.get(parentSession.getSessionHandle).foreach { pool =>
- hiveContext.sparkContext.setLocalProperty("spark.scheduler.pool", pool)
- }
iter = {
val useIncrementalCollect =
hiveContext.getConf("spark.sql.thriftServer.incrementalCollect", "false").toBoolean
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org