diff options
author | Ewen Cheslack-Postava <me@ewencp.org> | 2013-10-22 11:26:49 -0700 |
---|---|---|
committer | Ewen Cheslack-Postava <me@ewencp.org> | 2013-10-22 11:26:49 -0700 |
commit | 317a9eb1ceb165a74493c452a6c5fc0f9b5e2760 (patch) | |
tree | dcbd147eb2bfea91de3d12627985aa98ddf8e3af /python | |
parent | 56d230e614d7d03a0c53e262071ab388abddd97f (diff) | |
download | spark-317a9eb1ceb165a74493c452a6c5fc0f9b5e2760.tar.gz spark-317a9eb1ceb165a74493c452a6c5fc0f9b5e2760.tar.bz2 spark-317a9eb1ceb165a74493c452a6c5fc0f9b5e2760.zip |
Pass self to SparkContext._ensure_initialized.
The constructor for SparkContext should pass in self so that we track
the current context and produce errors if another one is created. Add
a doctest to make sure creating multiple contexts triggers the
exception.
Diffstat (limited to 'python')
-rw-r--r-- | python/pyspark/context.py | 11 |
1 file changed, 10 insertions(+), 1 deletion(-)
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 22f5d92a3b..a7ca8bc888 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -67,8 +67,17 @@ class SparkContext(object):
         @param batchSize: The number of Python objects represented as a single
                Java object.  Set 1 to disable batching or -1 to use an
                unlimited batch size.
+
+
+        >>> from pyspark.context import SparkContext
+        >>> sc = SparkContext('local', 'test')
+
+        >>> sc2 = SparkContext('local', 'test2')  # doctest: +IGNORE_EXCEPTION_DETAIL
+        Traceback (most recent call last):
+            ...
+        ValueError:...
         """
-        SparkContext._ensure_initialized()
+        SparkContext._ensure_initialized(self)

         self.master = master
         self.jobName = jobName