path: root/python/pyspark/streaming/tests.py
author    Holden Karau <holden@pigscanfly.ca>    2015-10-20 10:52:49 -0700
committer Josh Rosen <joshrosen@databricks.com>  2015-10-20 10:52:49 -0700
commit    e18b571c3374ecbfc0b20a5064cb58d57a2a7d21 (patch)
tree      e80aeebc7f8a247dff692f46a2bf7860a0e592dc /python/pyspark/streaming/tests.py
parent    94139557c56cea318d4a4f82a4deaf72198f349a (diff)
[SPARK-10447][SPARK-3842][PYSPARK] upgrade pyspark to py4j0.9
Upgrade to Py4j 0.9.

Author: Holden Karau <holden@pigscanfly.ca>
Author: Holden Karau <holden@us.ibm.com>

Closes #8615 from holdenk/SPARK-10447-upgrade-pyspark-to-py4j0.9.
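For context, here is a minimal, self-contained sketch of the defensive teardown pattern the hunks below introduce. The class name DefensiveTeardownExample and the standalone harness are illustrative only; cls.sc and self.ssc come from the real PySparkStreamingTestCase setup, which is not part of this diff, and the rationale in the comments is an assumption based on the commit message.

    import unittest

    from pyspark import SparkContext
    from pyspark.streaming import StreamingContext


    class DefensiveTeardownExample(unittest.TestCase):
        """Illustrative test case showing the try/except JVM cleanup."""

        @classmethod
        def tearDownClass(cls):
            cls.sc.stop()
            # After the Py4J 0.9 upgrade the gateway may no longer be usable at
            # this point, so the JVM-side cleanup is wrapped to keep a cleanup
            # failure from masking the real test result (assumption).
            try:
                jSparkContextOption = SparkContext._jvm.SparkContext.get()
                if jSparkContextOption.nonEmpty():
                    jSparkContextOption.get().stop()
            except:
                # The commit deliberately swallows any error here.
                pass

        def tearDown(self):
            if self.ssc is not None:
                self.ssc.stop(False)
            # Same pattern for the active StreamingContext on the JVM side.
            try:
                jStreamingContextOption = StreamingContext._jvm.SparkContext.getActive()
                if jStreamingContextOption.nonEmpty():
                    jStreamingContextOption.get().stop(False)
            except:
                pass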
Diffstat (limited to 'python/pyspark/streaming/tests.py')
-rw-r--r--  python/pyspark/streaming/tests.py  18
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/python/pyspark/streaming/tests.py b/python/pyspark/streaming/tests.py
index e4e56fff3b..49634252fd 100644
--- a/python/pyspark/streaming/tests.py
+++ b/python/pyspark/streaming/tests.py
@@ -61,9 +61,12 @@ class PySparkStreamingTestCase(unittest.TestCase):
     def tearDownClass(cls):
         cls.sc.stop()
         # Clean up in the JVM just in case there has been some issues in Python API
-        jSparkContextOption = SparkContext._jvm.SparkContext.get()
-        if jSparkContextOption.nonEmpty():
-            jSparkContextOption.get().stop()
+        try:
+            jSparkContextOption = SparkContext._jvm.SparkContext.get()
+            if jSparkContextOption.nonEmpty():
+                jSparkContextOption.get().stop()
+        except:
+            pass
 
     def setUp(self):
         self.ssc = StreamingContext(self.sc, self.duration)
@@ -72,9 +75,12 @@ class PySparkStreamingTestCase(unittest.TestCase):
         if self.ssc is not None:
             self.ssc.stop(False)
         # Clean up in the JVM just in case there has been some issues in Python API
-        jStreamingContextOption = StreamingContext._jvm.SparkContext.getActive()
-        if jStreamingContextOption.nonEmpty():
-            jStreamingContextOption.get().stop(False)
+        try:
+            jStreamingContextOption = StreamingContext._jvm.SparkContext.getActive()
+            if jStreamingContextOption.nonEmpty():
+                jStreamingContextOption.get().stop(False)
+        except:
+            pass
 
     def wait_for(self, result, n):
         start_time = time.time()