You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by ma...@apache.org on 2014/07/28 07:55:02 UTC
git commit: [SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
Repository: spark
Updated Branches:
refs/heads/master d7eac4c3d -> a7d145e98
[SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
This addresses a PySpark issue where a failed attempt to construct SparkContext would prevent any future SparkContext creation.
Author: Josh Rosen <jo...@apache.org>
Closes #1606 from JoshRosen/SPARK-1550 and squashes the following commits:
ec7fadc [Josh Rosen] [SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/a7d145e9
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/a7d145e9
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/a7d145e9
Branch: refs/heads/master
Commit: a7d145e98c55fa66a541293930f25d9cdc25f3b4
Parents: d7eac4c
Author: Josh Rosen <jo...@apache.org>
Authored: Sun Jul 27 22:54:43 2014 -0700
Committer: Matei Zaharia <ma...@databricks.com>
Committed: Sun Jul 27 22:54:43 2014 -0700
----------------------------------------------------------------------
python/pyspark/context.py | 18 ++++++++++++------
python/pyspark/tests.py | 6 ++++++
2 files changed, 18 insertions(+), 6 deletions(-)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/a7d145e9/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index e8ac989..830a6ee 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -100,7 +100,16 @@ class SparkContext(object):
tempNamedTuple = namedtuple("Callsite", "function file linenum")
self._callsite = tempNamedTuple(function=None, file=None, linenum=None)
SparkContext._ensure_initialized(self, gateway=gateway)
-
+ try:
+ self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
+ conf)
+ except:
+ # If an error occurs, clean up in order to allow future SparkContext creation:
+ self.stop()
+ raise
+
+ def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
+ conf):
self.environment = environment or {}
self._conf = conf or SparkConf(_jvm=self._jvm)
self._batchSize = batchSize # -1 represents an unlimited batch size
@@ -249,17 +258,14 @@ class SparkContext(object):
"""
return self._jsc.sc().defaultMinPartitions()
- def __del__(self):
- self.stop()
-
def stop(self):
"""
Shut down the SparkContext.
"""
- if self._jsc:
+ if getattr(self, "_jsc", None):
self._jsc.stop()
self._jsc = None
- if self._accumulatorServer:
+ if getattr(self, "_accumulatorServer", None):
self._accumulatorServer.shutdown()
self._accumulatorServer = None
with SparkContext._lock:
http://git-wip-us.apache.org/repos/asf/spark/blob/a7d145e9/python/pyspark/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index 8ba5146..63cc5e9 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -209,6 +209,12 @@ class TestAddFile(PySparkTestCase):
class TestRDDFunctions(PySparkTestCase):
+ def test_failed_sparkcontext_creation(self):
+ # Regression test for SPARK-1550
+ self.sc.stop()
+ self.assertRaises(Exception, lambda: SparkContext("an-invalid-master-name"))
+ self.sc = SparkContext("local")
+
def test_save_as_textfile_with_unicode(self):
# Regression test for SPARK-970
x = u"\u00A1Hola, mundo!"