aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/sql/session.py
diff options
context:
space:
mode:
authorAndrew Or <andrew@databricks.com>2016-04-29 20:46:07 -0700
committerYin Huai <yhuai@databricks.com>2016-04-29 20:46:07 -0700
commit66773eb8a55bfe6437dd4096c2c55685aca29dcd (patch)
tree35e6a60ea8d70b2bc487c607ae605b9df4a05576 /python/pyspark/sql/session.py
parentb056e8cb0a7c58c3e4d199af3ee13be50305b747 (diff)
downloadspark-66773eb8a55bfe6437dd4096c2c55685aca29dcd.tar.gz
spark-66773eb8a55bfe6437dd4096c2c55685aca29dcd.tar.bz2
spark-66773eb8a55bfe6437dd4096c2c55685aca29dcd.zip
[SPARK-15012][SQL] Simplify configuration API further
## What changes were proposed in this pull request?

1. Remove all the `spark.setConf` etc. Just expose `spark.conf`.
2. Make `spark.conf` take in things set in the core `SparkConf` as well; otherwise users may get confused.

This was done for both the Python and Scala APIs.

## How was this patch tested?

`SQLConfSuite`, python tests. This one fixes the failed tests in #12787. Closes #12787.

Author: Andrew Or <andrew@databricks.com> Author: Yin Huai <yhuai@databricks.com> Closes #12798 from yhuai/conf-api.
Diffstat (limited to 'python/pyspark/sql/session.py')
-rw-r--r--python/pyspark/sql/session.py29
1 file changed, 0 insertions(+), 29 deletions(-)
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index c2452613ba..35c36b4935 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -134,35 +134,6 @@ class SparkSession(object):
self._conf = RuntimeConfig(self._jsparkSession.conf())
return self._conf
- @since(2.0)
- def setConf(self, key, value):
- """
- Sets the given Spark SQL configuration property.
- """
- self._jsparkSession.setConf(key, value)
-
- @ignore_unicode_prefix
- @since(2.0)
- def getConf(self, key, defaultValue=None):
- """Returns the value of Spark SQL configuration property for the given key.
-
- If the key is not set and defaultValue is not None, return
- defaultValue. If the key is not set and defaultValue is None, return
- the system default value.
-
- >>> spark.getConf("spark.sql.shuffle.partitions")
- u'200'
- >>> spark.getConf("spark.sql.shuffle.partitions", "10")
- u'10'
- >>> spark.setConf("spark.sql.shuffle.partitions", "50")
- >>> spark.getConf("spark.sql.shuffle.partitions", "10")
- u'50'
- """
- if defaultValue is not None:
- return self._jsparkSession.getConf(key, defaultValue)
- else:
- return self._jsparkSession.getConf(key)
-
@property
@since(2.0)
def catalog(self):