You are viewing a plain text version of this content. The canonical link for it is here.
Posted to commits@spark.apache.org by rx...@apache.org on 2014/09/10 03:54:57 UTC
git commit: [SPARK-3458] enable python "with" statements for
SparkContext
Repository: spark
Updated Branches:
refs/heads/master c110614b3 -> 25b5b867d
[SPARK-3458] enable python "with" statements for SparkContext
allow for best practice code,
```
try:
    sc = SparkContext()
    app(sc)
finally:
    sc.stop()
```
to be written using a "with" statement,
```
with SparkContext() as sc:
    app(sc)
```
Author: Matthew Farrellee <ma...@redhat.com>
Closes #2335 from mattf/SPARK-3458 and squashes the following commits:
5b4e37c [Matthew Farrellee] [SPARK-3458] enable python "with" statements for SparkContext
Project: http://git-wip-us.apache.org/repos/asf/spark/repo
Commit: http://git-wip-us.apache.org/repos/asf/spark/commit/25b5b867
Tree: http://git-wip-us.apache.org/repos/asf/spark/tree/25b5b867
Diff: http://git-wip-us.apache.org/repos/asf/spark/diff/25b5b867
Branch: refs/heads/master
Commit: 25b5b867d5e18bac1c5bcdc6f8c63d97858194c7
Parents: c110614
Author: Matthew Farrellee <ma...@redhat.com>
Authored: Tue Sep 9 18:54:54 2014 -0700
Committer: Reynold Xin <rx...@apache.org>
Committed: Tue Sep 9 18:54:54 2014 -0700
----------------------------------------------------------------------
python/pyspark/context.py | 14 ++++++++++++++
python/pyspark/tests.py | 29 +++++++++++++++++++++++++++++
2 files changed, 43 insertions(+)
----------------------------------------------------------------------
http://git-wip-us.apache.org/repos/asf/spark/blob/25b5b867/python/pyspark/context.py
----------------------------------------------------------------------
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 5a30431..84bc0a3 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -232,6 +232,20 @@ class SparkContext(object):
             else:
                 SparkContext._active_spark_context = instance
+    def __enter__(self):
+        """
+        Enable 'with SparkContext(...) as sc: app(sc)' syntax.
+        """
+        return self
+
+    def __exit__(self, type, value, trace):
+        """
+        Enable 'with SparkContext(...) as sc: app' syntax.
+
+        Specifically stop the context on exit of the with block.
+        """
+        self.stop()
+
     @classmethod
     def setSystemProperty(cls, key, value):
         """
http://git-wip-us.apache.org/repos/asf/spark/blob/25b5b867/python/pyspark/tests.py
----------------------------------------------------------------------
diff --git a/python/pyspark/tests.py b/python/pyspark/tests.py
index 0bd2a9e..bb84ebe 100644
--- a/python/pyspark/tests.py
+++ b/python/pyspark/tests.py
@@ -1254,6 +1254,35 @@ class TestSparkSubmit(unittest.TestCase):
         self.assertIn("[2, 4, 6]", out)
+class ContextStopTests(unittest.TestCase):
+
+    def test_stop(self):
+        sc = SparkContext()
+        self.assertNotEqual(SparkContext._active_spark_context, None)
+        sc.stop()
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with(self):
+        with SparkContext() as sc:
+            self.assertNotEqual(SparkContext._active_spark_context, None)
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with_exception(self):
+        try:
+            with SparkContext() as sc:
+                self.assertNotEqual(SparkContext._active_spark_context, None)
+                raise Exception()
+        except:
+            pass
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+    def test_with_stop(self):
+        with SparkContext() as sc:
+            self.assertNotEqual(SparkContext._active_spark_context, None)
+            sc.stop()
+        self.assertEqual(SparkContext._active_spark_context, None)
+
+
+
 @unittest.skipIf(not _have_scipy, "SciPy not installed")
 class SciPyTests(PySparkTestCase):
---------------------------------------------------------------------
To unsubscribe, e-mail: commits-unsubscribe@spark.apache.org
For additional commands, e-mail: commits-help@spark.apache.org