diff options
author | mathieu longtin <mathieu.longtin@nuance.com> | 2016-04-23 22:38:36 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-04-23 22:38:36 -0700 |
commit | 902c15c5e6da55754501c2e56bd6379b9d5f1194 (patch) | |
tree | 5717e57e204b5a5a12d8b85f26c6886176da7530 /python/pyspark | |
parent | 1672149c2644b5670b4b9a4086a4456fb8279a55 (diff) | |
download | spark-902c15c5e6da55754501c2e56bd6379b9d5f1194.tar.gz spark-902c15c5e6da55754501c2e56bd6379b9d5f1194.tar.bz2 spark-902c15c5e6da55754501c2e56bd6379b9d5f1194.zip |
Support single argument version of sqlContext.getConf
## What changes were proposed in this pull request?
In Python, sqlContext.getConf didn't allow getting the system default (getConf with one parameter).
Now the following are supported:
```
sqlContext.getConf(confName) # System default if not locally set, this is new
sqlContext.getConf(confName, myDefault) # myDefault if not locally set, old behavior
```
I also added doctests to this function. The original behavior does not change.
## How was this patch tested?
Manually, but doctests were added.
Author: mathieu longtin <mathieu.longtin@nuance.com>
Closes #12488 from mathieulongtin/pyfixgetconf3.
Diffstat (limited to 'python/pyspark')
-rw-r--r-- | python/pyspark/sql/context.py | 20 |
1 file changed, 17 insertions, 3 deletions
@ignore_unicode_prefix
@since(1.3)
def getConf(self, key, defaultValue=None):
    """Return the value of the Spark SQL configuration property ``key``.

    Resolution order:
      * the value set in this context (via :meth:`setConf`), if any;
      * otherwise ``defaultValue``, when one was supplied;
      * otherwise the system default for ``key``.

    NOTE(review): ``None`` doubles as the "no default supplied" sentinel,
    so explicitly passing ``defaultValue=None`` behaves like the
    one-argument form — this is the contract documented by the commit.

    >>> sqlContext.getConf("spark.sql.shuffle.partitions")
    u'200'
    >>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
    u'10'
    >>> sqlContext.setConf("spark.sql.shuffle.partitions", "50")
    >>> sqlContext.getConf("spark.sql.shuffle.partitions", "10")
    u'50'
    """
    # Delegate to the JVM SQLContext; the one-argument overload falls
    # back to the system default when the key is not locally set.
    if defaultValue is None:
        return self._ssql_ctx.getConf(key)
    return self._ssql_ctx.getConf(key, defaultValue)