-rw-r--r--  python/pyspark/sql/conf.py                                              1
-rw-r--r--  python/pyspark/sql/session.py                                           3
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala        7
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala  16
            (renamed from sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala)
4 files changed, 11 insertions, 16 deletions
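
The core change: the three overloads of RuntimeConfig.set now return Unit instead
of the RuntimeConfig itself, so builder-style chaining of set(...) calls no longer
compiles; call sites make one set(...) call per statement, as the updated test
suite below shows. A minimal before/after sketch, assuming a SparkSession named
`spark` (RuntimeConfig is reached through SparkSession.conf):

    // Before: each set(...) returned this, so calls could be chained.
    spark.conf.set("k1", "v1").set("k2", 2L).set("k3", value = false)

    // After: set(...) returns Unit, so each call stands alone.
    spark.conf.set("k1", "v1")
    spark.conf.set("k2", 2L)
    spark.conf.set("k3", value = false)
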
diff --git a/python/pyspark/sql/conf.py b/python/pyspark/sql/conf.py
index 7428c91991..609d882a95 100644
--- a/python/pyspark/sql/conf.py
+++ b/python/pyspark/sql/conf.py
@@ -23,7 +23,6 @@ class RuntimeConfig(object):
"""User-facing configuration API, accessible through `SparkSession.conf`.
Options set here are automatically propagated to the Hadoop configuration during I/O.
- This a thin wrapper around its Scala implementation org.apache.spark.sql.RuntimeConfig.
"""
def __init__(self, jconf):
diff --git a/python/pyspark/sql/session.py b/python/pyspark/sql/session.py
index fb3e318163..04842f6185 100644
--- a/python/pyspark/sql/session.py
+++ b/python/pyspark/sql/session.py
@@ -71,9 +71,6 @@ class SparkSession(object):
.config("spark.some.config.option", "some-value") \
.getOrCreate()
- :param sparkContext: The :class:`SparkContext` backing this SparkSession.
- :param jsparkSession: An optional JVM Scala SparkSession. If set, we do not instantiate a new
- SparkSession in the JVM, instead we make all calls to this object.
"""
class Builder(object):
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
index 4fd6e42640..7e07e0cb84 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/RuntimeConfig.scala
@@ -35,9 +35,8 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
*
* @since 2.0.0
*/
- def set(key: String, value: String): RuntimeConfig = {
+ def set(key: String, value: String): Unit = {
sqlConf.setConfString(key, value)
- this
}
/**
@@ -45,7 +44,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
*
* @since 2.0.0
*/
- def set(key: String, value: Boolean): RuntimeConfig = {
+ def set(key: String, value: Boolean): Unit = {
set(key, value.toString)
}
@@ -54,7 +53,7 @@ class RuntimeConfig private[sql](sqlConf: SQLConf = new SQLConf) {
*
* @since 2.0.0
*/
- def set(key: String, value: Long): RuntimeConfig = {
+ def set(key: String, value: Long): Unit = {
set(key, value.toString)
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
index a629b73ac0..cfe2e9f2db 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/RuntimeConfigSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/RuntimeConfigSuite.scala
@@ -15,10 +15,9 @@
* limitations under the License.
*/
-package org.apache.spark.sql.internal
+package org.apache.spark.sql
import org.apache.spark.SparkFunSuite
-import org.apache.spark.sql.RuntimeConfig
class RuntimeConfigSuite extends SparkFunSuite {
@@ -26,10 +25,9 @@ class RuntimeConfigSuite extends SparkFunSuite {
test("set and get") {
val conf = newConf()
- conf
- .set("k1", "v1")
- .set("k2", 2)
- .set("k3", value = false)
+ conf.set("k1", "v1")
+ conf.set("k2", 2)
+ conf.set("k3", value = false)
assert(conf.get("k1") == "v1")
assert(conf.get("k2") == "2")
@@ -41,13 +39,15 @@ class RuntimeConfigSuite extends SparkFunSuite {
}
test("getOption") {
- val conf = newConf().set("k1", "v1")
+ val conf = newConf()
+ conf.set("k1", "v1")
assert(conf.getOption("k1") == Some("v1"))
assert(conf.getOption("notset") == None)
}
test("unset") {
- val conf = newConf().set("k1", "v1")
+ val conf = newConf()
+ conf.set("k1", "v1")
assert(conf.get("k1") == "v1")
conf.unset("k1")
intercept[NoSuchElementException] {