 sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala | 8 ++++----
 sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala    | 4 +++-
 2 files changed, 7 insertions(+), 5 deletions(-)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
index 20ebe4996c..fdb56901f9 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveContext.scala
@@ -231,12 +231,13 @@ class HiveContext(sc: SparkContext) extends SQLContext(sc) {
   @transient protected[hive] lazy val sessionState = {
     val ss = new SessionState(hiveconf)
     setConf(hiveconf.getAllProperties) // Have SQLConf pick up the initial set of HiveConf.
+    SessionState.start(ss)
+    ss.err = new PrintStream(outputBuffer, true, "UTF-8")
+    ss.out = new PrintStream(outputBuffer, true, "UTF-8")
+
     ss
   }
 
-  sessionState.err = new PrintStream(outputBuffer, true, "UTF-8")
-  sessionState.out = new PrintStream(outputBuffer, true, "UTF-8")
-
   override def setConf(key: String, value: String): Unit = {
     super.setConf(key, value)
     runSqlHive(s"SET $key=$value")
@@ -273,7 +274,6 @@
     results
   }
 
-  SessionState.start(sessionState)
 
   /**
    * Execute the command using Hive and return the results as a sequence. Each element
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
index 70fb15259e..4a999b98ad 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/TestHive.scala
@@ -40,8 +40,10 @@ import org.apache.spark.sql.SQLConf
 /* Implicit conversions */
 import scala.collection.JavaConversions._
 
+// SPARK-3729: Test key required to check for initialization errors with config.
 object TestHive
-  extends TestHiveContext(new SparkContext("local[2]", "TestSQLContext", new SparkConf()))
+  extends TestHiveContext(
+    new SparkContext("local[2]", "TestSQLContext", new SparkConf().set("spark.sql.test", "")))
 
 /**
  * A locally running test instance of Spark's Hive execution engine.
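
A minimal standalone sketch (not part of the patch) of the initialization order the HiveContext change establishes: the Hive SessionState is started and its output streams are redirected inside the lazy val itself, so whichever caller first forces `sessionState` sees a fully initialized session. The `SessionStateHolder` wrapper and the `outputBuffer`/`hiveconf` stand-ins below are assumptions for illustration; only `SessionState`, `HiveConf`, and `PrintStream` are the real APIs referenced in the diff.

    import java.io.{ByteArrayOutputStream, PrintStream}

    import org.apache.hadoop.hive.conf.HiveConf
    import org.apache.hadoop.hive.ql.session.SessionState

    class SessionStateHolder {
      // Stand-in for HiveContext's output buffer (assumption for this sketch).
      private val outputBuffer = new ByteArrayOutputStream()
      private val hiveconf = new HiveConf(classOf[SessionState])

      // All session initialization happens in one place, on first access.
      @transient lazy val sessionState: SessionState = {
        val ss = new SessionState(hiveconf)
        SessionState.start(ss)                                 // register the session before first use
        ss.err = new PrintStream(outputBuffer, true, "UTF-8")  // capture Hive's stderr
        ss.out = new PrintStream(outputBuffer, true, "UTF-8")  // capture Hive's stdout
        ss
      }
    }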