author | Josh Rosen <joshrosen@apache.org> | 2014-07-27 22:54:43 -0700
---|---|---
committer | Matei Zaharia <matei@databricks.com> | 2014-07-27 22:54:43 -0700
commit | a7d145e98c55fa66a541293930f25d9cdc25f3b4 |
tree | 624282a2fdcd3f2a237ca1ffdd011d842dbf51c4 | python/pyspark/context.py
parent | d7eac4c3db7462e60e0c456dc93780167f5fcb2c |
[SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
This addresses a PySpark issue where a failed attempt to construct SparkContext would prevent any future SparkContext creation.
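A minimal sketch of the behavior this enables (the master URL and app names below are made up for illustration; any exception raised during construction takes the same path):

```python
from pyspark import SparkContext

# Illustrative only: an unresolvable master makes SparkContext.__init__ fail
# partway through initialization.
try:
    sc = SparkContext("nonexistent://bad-master", "FailingApp")
except Exception:
    pass  # before this patch, the half-initialized context stayed registered

# With the patch, the failed attempt calls stop() on itself, so a retry works
# instead of raising "Cannot run multiple SparkContexts at once".
sc = SparkContext("local", "RecoveredApp")
sc.stop()
```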
Author: Josh Rosen <joshrosen@apache.org>
Closes #1606 from JoshRosen/SPARK-1550 and squashes the following commits:
ec7fadc [Josh Rosen] [SPARK-1550] [PySpark] Allow SparkContext creation after failed attempts
Diffstat (limited to 'python/pyspark/context.py')
-rw-r--r-- | python/pyspark/context.py | 18
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index e8ac9895cf..830a6ee03f 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -100,7 +100,16 @@ class SparkContext(object):
         tempNamedTuple = namedtuple("Callsite", "function file linenum")
         self._callsite = tempNamedTuple(function=None, file=None, linenum=None)
         SparkContext._ensure_initialized(self, gateway=gateway)
-
+        try:
+            self._do_init(master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
+                          conf)
+        except:
+            # If an error occurs, clean up in order to allow future SparkContext creation:
+            self.stop()
+            raise
+
+    def _do_init(self, master, appName, sparkHome, pyFiles, environment, batchSize, serializer,
+                 conf):
         self.environment = environment or {}
         self._conf = conf or SparkConf(_jvm=self._jvm)
         self._batchSize = batchSize  # -1 represents an unlimited batch size
@@ -249,17 +258,14 @@ class SparkContext(object):
         """
         return self._jsc.sc().defaultMinPartitions()
 
-    def __del__(self):
-        self.stop()
-
     def stop(self):
         """
         Shut down the SparkContext.
         """
-        if self._jsc:
+        if getattr(self, "_jsc", None):
             self._jsc.stop()
             self._jsc = None
-        if self._accumulatorServer:
+        if getattr(self, "_accumulatorServer", None):
             self._accumulatorServer.shutdown()
             self._accumulatorServer = None
         with SparkContext._lock:
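The switch from `if self._jsc:` to `getattr(self, "_jsc", None)` in `stop()` is what makes the cleanup path safe: when `_do_init` fails before those attributes are assigned, `stop()` runs against a partially-initialized object. A condensed sketch of the idea, using a hypothetical class rather than Spark's real one:

```python
class Ctx:
    """Hypothetical stand-in showing the cleanup-on-failure pattern."""

    def __init__(self, fail_early=False):
        try:
            if fail_early:
                raise RuntimeError("failure before _handle is assigned")
            self._handle = object()
        except Exception:
            self.stop()  # must tolerate partially-initialized state
            raise

    def stop(self):
        # A plain `if self._handle:` would raise AttributeError here when
        # __init__ failed early; getattr(..., None) degrades to a no-op.
        if getattr(self, "_handle", None):
            self._handle = None


try:
    Ctx(fail_early=True)
except RuntimeError:
    pass  # stop() ran safely even though _handle never existed
```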