author    Matei Zaharia <matei@databricks.com>  2013-12-29 22:19:33 -0500
committer Matei Zaharia <matei@databricks.com>  2013-12-29 22:19:33 -0500
commit    994f080f8ae3372366e6004600ba791c8a372ff0 (patch)
tree      2b4ef5363c5a881dd98e98ca9eecd3c3d5f57371 /python
parent    eaa8a68ff08304f713f4f75d39c61c020e0e691d (diff)
Properly show Spark properties on web UI, and change app name property
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/conf.py     2
-rw-r--r--  python/pyspark/context.py  4
2 files changed, 3 insertions, 3 deletions
diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index cf98b0e071..c07dd88307 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -23,7 +23,7 @@
<pyspark.conf.SparkConf object at ...>
>>> conf.get("spark.master")
u'local'
->>> conf.get("spark.appName")
+>>> conf.get("spark.app.name")
u'My app'
>>> sc = SparkContext(conf=conf)
>>> sc.master
diff --git a/python/pyspark/context.py b/python/pyspark/context.py
index 8b028027eb..12ac0299e2 100644
--- a/python/pyspark/context.py
+++ b/python/pyspark/context.py
@@ -104,13 +104,13 @@ class SparkContext(object):
# Check that we have at least the required parameters
if not self.conf.contains("spark.master"):
raise Exception("A master URL must be set in your configuration")
- if not self.conf.contains("spark.appName"):
+ if not self.conf.contains("spark.app.name"):
raise Exception("An application name must be set in your configuration")
# Read back our properties from the conf in case we loaded some of them from
# the classpath or an external config file
self.master = self.conf.get("spark.master")
- self.appName = self.conf.get("spark.appName")
+ self.appName = self.conf.get("spark.app.name")
self.sparkHome = self.conf.getOrElse("spark.home", None)
for (k, v) in self.conf.getAll():
if k.startswith("spark.executorEnv."):
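In practice, this change means PySpark reads the application name from the "spark.app.name" key rather than the old "spark.appName" key. A minimal sketch of how a PySpark program would look after this change (the master URL "local" and the app name "My app" are just illustrative values, mirroring the doctest above):

from pyspark import SparkConf, SparkContext

# setAppName() now stores the application name under "spark.app.name".
conf = SparkConf().setMaster("local").setAppName("My app")
print(conf.get("spark.app.name"))   # -> My app

# Setting the key directly is equivalent to calling setAppName().
conf.set("spark.app.name", "My app")

sc = SparkContext(conf=conf)
print(sc.appName)                   # read back from the conf in __init__
sc.stop()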