[SPARK-3458] enable python "with" statements for SparkContext
Allow best-practice code,

    try:
        sc = SparkContext()
        app(sc)
    finally:
        sc.stop()

to be written using a "with" statement:

    with SparkContext() as sc:
        app(sc)

Author: Matthew Farrellee <matt@redhat.com>

Closes #2335 from mattf/SPARK-3458 and squashes the following commits:

5b4e37c [Matthew Farrellee] [SPARK-3458] enable python "with" statements for SparkContext
This commit is contained in:
parent
c110614b33
commit
25b5b867d5
|
@ -232,6 +232,20 @@ class SparkContext(object):
|
|||
else:
|
||||
SparkContext._active_spark_context = instance
|
||||
|
||||
def __enter__(self):
|
||||
"""
|
||||
Enable 'with SparkContext(...) as sc: app(sc)' syntax.
|
||||
"""
|
||||
return self
|
||||
|
||||
def __exit__(self, type, value, trace):
|
||||
"""
|
||||
Enable 'with SparkContext(...) as sc: app' syntax.
|
||||
|
||||
Specifically stop the context on exit of the with block.
|
||||
"""
|
||||
self.stop()
|
||||
|
||||
@classmethod
|
||||
def setSystemProperty(cls, key, value):
|
||||
"""
|
||||
|
|
|
@ -1254,6 +1254,35 @@ class TestSparkSubmit(unittest.TestCase):
|
|||
self.assertIn("[2, 4, 6]", out)
|
||||
|
||||
|
||||
class ContextStopTests(unittest.TestCase):
    """Verify that SparkContext._active_spark_context is cleared by stop(),
    both when called explicitly and when triggered by exiting a with-block.
    """

    def test_stop(self):
        # An explicit stop() must clear the active-context singleton.
        sc = SparkContext()
        self.assertNotEqual(SparkContext._active_spark_context, None)
        sc.stop()
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with(self):
        # Exiting a with-block normally must stop the context.
        with SparkContext() as sc:
            self.assertNotEqual(SparkContext._active_spark_context, None)
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with_exception(self):
        # Exiting a with-block via an exception must also stop the context.
        try:
            with SparkContext() as sc:
                self.assertNotEqual(SparkContext._active_spark_context, None)
                raise Exception()
        # Was a bare 'except:', which would also swallow SystemExit and
        # KeyboardInterrupt; only the Exception raised above is expected.
        except Exception:
            pass
        self.assertEqual(SparkContext._active_spark_context, None)

    def test_with_stop(self):
        # Calling stop() inside the block and then exiting must be safe,
        # i.e. the implicit stop() in __exit__ is effectively idempotent.
        with SparkContext() as sc:
            self.assertNotEqual(SparkContext._active_spark_context, None)
            sc.stop()
        self.assertEqual(SparkContext._active_spark_context, None)
||||
@unittest.skipIf(not _have_scipy, "SciPy not installed")
|
||||
class SciPyTests(PySparkTestCase):
|
||||
|
||||
|
|
Loading…
Reference in a new issue