From 902c15c5e6da55754501c2e56bd6379b9d5f1194 Mon Sep 17 00:00:00 2001
From: mathieu longtin
Date: Sat, 23 Apr 2016 22:38:36 -0700
Subject: Support single argument version of sqlContext.getConf

## What changes were proposed in this pull request?

In Python, sqlContext.getConf didn't allow getting the system default (getConf with one parameter).

Now the following are supported:
```
sqlContext.getConf(confName)             # System default if not locally set, this is new
sqlContext.getConf(confName, myDefault)  # myDefault if not locally set, old behavior
```

I also added doctests to this function. The original behavior does not change.

## How was this patch tested?

Manually, but doctests were added.

Author: mathieu longtin

Closes #12488 from mathieulongtin/pyfixgetconf3.
---
 python/pyspark/sql/context.py | 20 +++++++++++++++++---
 1 file changed, 17 insertions(+), 3 deletions(-)

diff --git a/python/pyspark/sql/context.py b/python/pyspark/sql/context.py
index 11dfcfe13e..ac98639f3a 100644
--- a/python/pyspark/sql/context.py
+++ b/python/pyspark/sql/context.py
@@ -146,13 +146,27 @@ class SQLContext(object):
         """
         self._ssql_ctx.setConf(key, value)
 
+    @ignore_unicode_prefix
     @since(1.3)
-    def getConf(self, key, defaultValue):
+    def getConf(self, key, defaultValue=None):
         """Returns the value of Spark SQL configuration property for the given key.
 
-        If the key is not set, returns defaultValue.
+        If the key is not set and defaultValue is not None, return
+        defaultValue. If the key is not set and defaultValue is None, return
+        the system default value.
+
+        >>> sqlContext.getConf("spark.sql.shuffle.partitions")
+        u'200'
+        >>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
+        u'10'
+        >>> sqlContext.setConf("spark.sql.shuffle.partitions", "50")
+        >>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
+        u'50'
         """
-        return self._ssql_ctx.getConf(key, defaultValue)
+        if defaultValue is not None:
+            return self._ssql_ctx.getConf(key, defaultValue)
+        else:
+            return self._ssql_ctx.getConf(key)
 
     @property
     @since("1.3.1")
-- 
cgit v1.2.3
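
A minimal sketch of the two call forms this patch enables, assuming a standalone Python script with PySpark on the path (the local master and app name are illustrative only); the config key and values mirror the doctests in the patch.
```
from pyspark import SparkContext
from pyspark.sql import SQLContext

# Stand up a local context purely for illustration.
sc = SparkContext("local[2]", "getconf-example")
sqlContext = SQLContext(sc)

# New single-argument form: falls back to the system default
# (u'200' in the doctests above) when the key is not set locally.
sqlContext.getConf("spark.sql.shuffle.partitions")

# Two-argument form, pre-existing behavior: the caller-supplied default
# is returned when the key is not set locally.
sqlContext.getConf("spark.sql.shuffle.partitions", "10")

# After an explicit setConf, both forms return the stored value.
sqlContext.setConf("spark.sql.shuffle.partitions", "50")
sqlContext.getConf("spark.sql.shuffle.partitions")

sc.stop()
```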