From 31faec790f7b284b95907c66f0ee1914dd4c9fa2 Mon Sep 17 00:00:00 2001
From: Xiangrui Meng
Date: Wed, 14 May 2014 14:57:17 -0700
Subject: [FIX] do not load defaults when testing SparkConf in pyspark

The default constructor loads default properties, which can fail the test.

Author: Xiangrui Meng

Closes #775 from mengxr/pyspark-conf-fix and squashes the following commits:

83ef6c4 [Xiangrui Meng] do not load defaults when testing SparkConf in pyspark

(cherry picked from commit 94c6c06ea13032b80610b3f54401d2ef2aa4874a)
Signed-off-by: Reynold Xin
---
 python/pyspark/conf.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/python/pyspark/conf.py b/python/pyspark/conf.py
index 49b68d57ab..8eff4a242a 100644
--- a/python/pyspark/conf.py
+++ b/python/pyspark/conf.py
@@ -33,7 +33,7 @@ u'My app'
 >>> sc.sparkHome == None
 True

->>> conf = SparkConf()
+>>> conf = SparkConf(loadDefaults=False)
 >>> conf.setSparkHome("/path")
 >>> conf.get("spark.home")
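
For context, a minimal sketch of what the one-line fix guards against. With
loadDefaults=True (the constructor's default), the underlying Java SparkConf
copies every spark.* JVM system property into the new conf, so a spark.home
value preset by the test environment could shadow the value the doctest
expects; loadDefaults=False starts from an empty conf instead. The snippet is
illustrative only: it assumes it runs where the Py4J gateway is available
(e.g. inside a pyspark shell, as the doctests do), and the property name and
path are taken from the doctest itself.

    # Sketch: run inside a pyspark shell, since SparkConf's constructor
    # needs the Py4J gateway to build the backing Java SparkConf.
    from pyspark.conf import SparkConf

    # loadDefaults=True (the default) would also pull in any preexisting
    # spark.* system properties, making conf.get("spark.home")
    # environment-dependent; loadDefaults=False avoids that.
    conf = SparkConf(loadDefaults=False)  # start from an empty config
    conf.setSparkHome("/path")            # setters return self for chaining
    assert conf.get("spark.home") == "/path"

Note that loadDefaults=False is appropriate only for isolating the doctest;
real applications keep the default so that properties from spark-defaults
and the command line are still honored.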