aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/conf.py
diff options
context:
space:
mode:
authorMatei Zaharia <matei@databricks.com>2013-12-29 14:31:45 -0500
committerMatei Zaharia <matei@databricks.com>2013-12-29 14:32:05 -0500
commit615fb649d66b13371927a051d249433d746c5f19 (patch)
tree5a3b3487b46517765d31cdc0f2c2f340c714666d /python/pyspark/conf.py
parentcd00225db9b90fc845fd1458831bdd9d014d1bb6 (diff)
downloadspark-615fb649d66b13371927a051d249433d746c5f19.tar.gz
spark-615fb649d66b13371927a051d249433d746c5f19.tar.bz2
spark-615fb649d66b13371927a051d249433d746c5f19.zip
Fix some other Python tests due to initializing JVM in a different way
The test in context.py created two different instances of the SparkContext class by copying "globals", so that some tests can have a global "sc" object and others can try initializing their own contexts. This led to two JVM gateways being created since SparkConf also looked at pyspark.context.SparkContext to get the JVM.
Diffstat (limited to 'python/pyspark/conf.py')
-rw-r--r--python/pyspark/conf.py5
1 file changed, 3 insertions, 2 deletions
diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index 56e615c287..eb7a6c13fe 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -50,10 +50,11 @@ u'value1'
class SparkConf(object):
- def __init__(self, loadDefaults=False):
+ def __init__(self, loadDefaults=True, _jvm=None):
from pyspark.context import SparkContext
SparkContext._ensure_initialized()
- self._jconf = SparkContext._jvm.SparkConf(loadDefaults)
+ _jvm = _jvm or SparkContext._jvm
+ self._jconf = _jvm.SparkConf(loadDefaults)
def set(self, key, value):
self._jconf.set(key, value)