From 594a1bf200fea8d6bcf25839a49186f66f922bc8 Mon Sep 17 00:00:00 2001
From: Eric Liang
Date: Thu, 26 May 2016 12:05:47 -0700
Subject: [SPARK-15520][SQL] Also set sparkContext confs when using
 SparkSession builder in pyspark

## What changes were proposed in this pull request?

Also sets confs in the underlying sc when using SparkSession.builder.getOrCreate(). This is a bug-fix from a post-merge comment in https://github.com/apache/spark/pull/13289

## How was this patch tested?

Python doc-tests.

Author: Eric Liang

Closes #13309 from ericl/spark-15520-1.
---
 python/pyspark/sql/session.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index 52e7f3d348..8f7dcb54a7 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -144,7 +144,7 @@ class SparkSession(object):
             default.
 
             >>> s1 = SparkSession.builder.config("k1", "v1").getOrCreate()
-            >>> s1.conf.get("k1") == "v1"
+            >>> s1.conf.get("k1") == s1.sparkContext.getConf().get("k1") == "v1"
             True
 
             In case an existing SparkSession is returned, the config options specified
@@ -168,6 +168,8 @@ class SparkSession(object):
                 session = SparkSession(sc)
                 for key, value in self._options.items():
                     session.conf.set(key, value)
+                for key, value in self._options.items():
+                    session.sparkContext._conf.set(key, value)
                 return session
 
     builder = Builder()
--
cgit v1.2.3
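
Below is a minimal PySpark sketch of how the fixed behavior can be exercised from user code; it mirrors the doctest changed in this patch. The key/value names `k1`/`v1` are just the placeholder values from that doctest, and a working local Spark installation is assumed.

```python
from pyspark.sql import SparkSession

# Build (or reuse) a session with an extra config option.
spark = SparkSession.builder.config("k1", "v1").getOrCreate()

# After this patch the option is visible both in the SQL conf and in the
# conf of the underlying SparkContext; before the fix only the SQL conf
# was updated when getOrCreate() returned the session.
assert spark.conf.get("k1") == "v1"
assert spark.sparkContext.getConf().get("k1") == "v1"

spark.stop()
```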