Posted to commits@spark.apache.org by gu...@apache.org on 2023/03/02 00:15:07 UTC
[spark] branch branch-3.4 updated: [SPARK-42637][CONNECT] Add SparkSession.stop()
This is an automated email from the ASF dual-hosted git repository.
gurwls223 pushed a commit to branch branch-3.4
in repository https://gitbox.apache.org/repos/asf/spark.git
The following commit(s) were added to refs/heads/branch-3.4 by this push:
new 93289f25e11 [SPARK-42637][CONNECT] Add SparkSession.stop()
93289f25e11 is described below
commit 93289f25e11efc2aa8b680285a24810768ebaa97
Author: Herman van Hovell <he...@databricks.com>
AuthorDate: Thu Mar 2 09:14:33 2023 +0900
[SPARK-42637][CONNECT] Add SparkSession.stop()
### What changes were proposed in this pull request?
Add `SparkSession.stop()` to SparkSession.
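A minimal usage sketch, assuming a Spark Connect server at a placeholder address and the 3.4 Connect Scala client builder API (`builder().remote(...).build()`):

```scala
import org.apache.spark.sql.SparkSession

// Placeholder endpoint; point this at a running Spark Connect server.
val spark = SparkSession.builder().remote("sc://localhost:15002").build()

spark.range(3).collect().foreach(println)

// New in this patch: stop() is a synonym for close(). It shuts down the
// client connection and then closes the Arrow allocator.
spark.stop()
```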
### Why are the changes needed?
API parity: the classic (non-Connect) `SparkSession` already exposes `stop()`.
### Does this PR introduce _any_ user-facing change?
Yes.
### How was this patch tested?
Manually tested it.
Closes #40239 from hvanhovell/SPARK-42637.
Authored-by: Herman van Hovell <he...@databricks.com>
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
(cherry picked from commit 1667d3152603c1f6f0fb691e0899839898090ec6)
Signed-off-by: Hyukjin Kwon <gu...@apache.org>
---
.../src/main/scala/org/apache/spark/sql/SparkSession.scala | 13 +++++++++++++
.../spark/sql/connect/client/CompatibilitySuite.scala | 1 -
.../spark/sql/connect/client/util/RemoteSparkSession.scala | 2 +-
3 files changed, 14 insertions(+), 2 deletions(-)
diff --git a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
index e72dc264727..12d984f150d 100644
--- a/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
+++ b/connector/connect/client/jvm/src/main/scala/org/apache/spark/sql/SparkSession.scala
@@ -296,6 +296,19 @@ class SparkSession private[sql] (
     planIdGenerator.set(0)
   }

+  /**
+   * Synonym for `close()`.
+   *
+   * @since 3.4.0
+   */
+  def stop(): Unit = close()
+
+  /**
+   * Close the [[SparkSession]]. This closes the connection, and the allocator. The latter will
+   * throw an exception if there are still open [[SparkResult]]s.
+   *
+   * @since 3.4.0
+   */
   override def close(): Unit = {
     client.shutdown()
     allocator.close()
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
index b96d2d372f3..8728c351780 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/CompatibilitySuite.scala
@@ -167,7 +167,6 @@ class CompatibilitySuite extends ConnectFunSuite {
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.catalog"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.executeCommand"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.readStream"),
-      ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.stop"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.this"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.setDefaultSession"),
       ProblemFilters.exclude[Problem]("org.apache.spark.sql.SparkSession.clearDefaultSession"),
diff --git a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
index 0ec31ee9943..beae5bfa27e 100644
--- a/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
+++ b/connector/connect/client/jvm/src/test/scala/org/apache/spark/sql/connect/client/util/RemoteSparkSession.scala
@@ -155,7 +155,7 @@ trait RemoteSparkSession extends ConnectFunSuite with BeforeAndAfterAll {

   override def afterAll(): Unit = {
     try {
-      if (spark != null) spark.close()
+      if (spark != null) spark.stop()
     } catch {
       case e: Throwable => debug(e)
     }
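As the new scaladoc notes, `close()` (and hence `stop()`) tears down the connection first and the Arrow allocator second, and the allocator throws if `SparkResult` buffers are still open. A sketch of the safe ordering, assuming `collect()` fully drains and releases the result buffers before shutdown (endpoint again a placeholder):

```scala
import org.apache.spark.sql.SparkSession

val spark = SparkSession.builder().remote("sc://localhost:15002").build()

// Materialize the result eagerly; once collect() returns, the underlying
// Arrow buffers are released and the allocator can close without error.
val ones = spark.sql("select 1 as one").collect()

// stop() == close(): shuts down the client, then closes the allocator.
spark.stop()
```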
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org