about summary refs log tree commit diff
path: root/python
diff options
context:
space:
mode:
author: Matei Zaharia <matei@databricks.com> 2014-01-01 23:21:34 -0500
committer: Matei Zaharia <matei@databricks.com> 2014-01-01 23:21:34 -0500
commit7e8d2e8a5c88d16c771923504c433491b109ab2a (patch)
treeff3aa8fa3460078007259a6a6479dc4aec27b50a /python
parent0f6060733da83a862038fd397875cdb49d8c144d (diff)
downloadspark-7e8d2e8a5c88d16c771923504c433491b109ab2a.tar.gz
spark-7e8d2e8a5c88d16c771923504c433491b109ab2a.tar.bz2
spark-7e8d2e8a5c88d16c771923504c433491b109ab2a.zip
Fix Python code after change of getOrElse
Diffstat (limited to 'python')
-rw-r--r--python/pyspark/conf.py7
-rw-r--r--python/pyspark/context.py14
2 files changed, 14 insertions, 7 deletions
diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index c111e2e90f..d72aed6a30 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -134,7 +134,12 @@ class SparkConf(object):
def get(self, key, defaultValue=None):
"""Get the configured value for some key, or return a default otherwise."""
- return self._jconf.get(key, defaultValue)
+ if defaultValue == None: # Py4J doesn't call the right get() if we pass None
+ if not self._jconf.contains(key):
+ return None
+ return self._jconf.get(key)
+ else:
+ return self._jconf.get(key, defaultValue)
def getAll(self):
"""Get all values as a list of key-value pairs."""
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index d77dd76765..f955aad7a4 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -92,11 +92,13 @@ class SparkContext(object):
self.serializer = BatchedSerializer(self._unbatched_serializer,
batchSize)
- # Set parameters passed directly to us on the conf; these operations will be
- # no-ops if the parameters were None
- self._conf.setMaster(master)
- self._conf.setAppName(appName)
- self._conf.setSparkHome(sparkHome)
+ # Set any parameters passed directly to us on the conf
+ if master:
+ self._conf.setMaster(master)
+ if appName:
+ self._conf.setAppName(appName)
+ if sparkHome:
+ self._conf.setSparkHome(sparkHome)
if environment:
for key, value in environment.iteritems():
self._conf.setExecutorEnv(key, value)
@@ -111,7 +113,7 @@ class SparkContext(object):
# the classpath or an external config file
self.master = self._conf.get("spark.master")
self.appName = self._conf.get("spark.app.name")
- self.sparkHome = self._conf.getOrElse("spark.home", None)
+ self.sparkHome = self._conf.get("spark.home", None)
for (k, v) in self._conf.getAll():
if k.startswith("spark.executorEnv."):
varName = k[len("spark.executorEnv."):]