author     Yin Huai <yhuai@databricks.com>         2016-06-16 17:06:24 -0700
committer  Shixiong Zhu <shixiong@databricks.com>  2016-06-16 17:06:24 -0700
commit     d9c6628c47de547dc537310e3c775c7f3e0e4a12 (patch)
tree       44d897bb65296bb52bdee0a3f306bbf224740c53 /sql/core/src/test
parent     62d2fa5e996d428caaea005041b17ec115473762 (diff)
[SPARK-15991] SparkContext.hadoopConfiguration should always be the base of the Hadoop conf created by SessionState
## What changes were proposed in this pull request?

Before this patch, once a SparkSession had been created, Hadoop conf entries set directly on SparkContext.hadoopConfiguration did not affect the Hadoop conf created by SessionState. This patch makes SessionState always use SparkContext.hadoopConfiguration as the base.

This patch also changes the behavior of the hive-site.xml support added in https://github.com/apache/spark/pull/12689/. With this patch, hive-site.xml is loaded into SparkContext.hadoopConfiguration.

## How was this patch tested?

New tests in SparkSessionBuilderSuite.

Author: Yin Huai <yhuai@databricks.com>

Closes #13711 from yhuai/SPARK-15991.
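The behavior change is easy to demonstrate outside the test suite. Below is a minimal sketch (not part of the patch) assuming Spark at this commit: it sets a hypothetical key, `my.custom.key`, on `SparkContext.hadoopConfiguration` after the session already exists, then checks that a freshly built session-level Hadoop conf picks it up. Since `sessionState` is package-private (`private[sql]`) at this point, the sketch lives in `org.apache.spark.sql`, as the tests do; the object and app names are made up.

```scala
// Minimal sketch (not part of the patch): with this change, keys set on
// SparkContext.hadoopConfiguration after a SparkSession exists become
// visible in SessionState.newHadoopConf(), which now always uses the
// SparkContext conf as its base. The key "my.custom.key" is hypothetical.
// Placed in org.apache.spark.sql because sessionState is private[sql].
package org.apache.spark.sql

object HadoopConfPropagationSketch {  // hypothetical name
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .master("local")
      .appName("hadoop-conf-propagation-sketch")
      .getOrCreate()
    try {
      // Mutate the global Hadoop conf on the shared SparkContext.
      spark.sparkContext.hadoopConfiguration.set("my.custom.key", "my-value")
      // After the patch, a newly created session-level Hadoop conf sees it.
      assert(spark.sessionState.newHadoopConf().get("my.custom.key") == "my-value")
    } finally {
      spark.stop()
    }
  }
}
```

Per the commit message, before this patch the session-level conf did not track mutations made to SparkContext.hadoopConfiguration after the session was created, so the assertion above would have failed.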
Diffstat (limited to 'sql/core/src/test')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala            |  4 -
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala | 20 +
2 files changed, 20 insertions(+), 4 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index 545c1776b7..bbe821b768 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -2870,8 +2870,4 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
       sql(s"SELECT '$literal' AS DUMMY"),
       Row(s"$expected") :: Nil)
   }
-
-  test("SPARK-15887: hive-site.xml should be loaded") {
-    assert(spark.sessionState.newHadoopConf().get("hive.in.test") == "true")
-  }
 }
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
index 786956df8a..418345b9ee 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SparkSessionBuilderSuite.scala
@@ -102,4 +102,24 @@ class SparkSessionBuilderSuite extends SparkFunSuite {
     assert(session.sparkContext.conf.get("key2") == "value2")
     session.stop()
   }
+
+  test("SPARK-15887: hive-site.xml should be loaded") {
+    val session = SparkSession.builder().master("local").getOrCreate()
+    assert(session.sessionState.newHadoopConf().get("hive.in.test") == "true")
+    assert(session.sparkContext.hadoopConfiguration.get("hive.in.test") == "true")
+    session.stop()
+  }
+
+  test("SPARK-15991: Set global Hadoop conf") {
+    val session = SparkSession.builder().master("local").getOrCreate()
+    val mySpecialKey = "my.special.key.15991"
+    val mySpecialValue = "msv"
+    try {
+      session.sparkContext.hadoopConfiguration.set(mySpecialKey, mySpecialValue)
+      assert(session.sessionState.newHadoopConf().get(mySpecialKey) == mySpecialValue)
+    } finally {
+      session.sparkContext.hadoopConfiguration.unset(mySpecialKey)
+      session.stop()
+    }
+  }
 }