path: root/sql/hive-thriftserver/src
author    Cheng Lian <lian@databricks.com>    2015-09-26 19:08:55 -0700
committer Cheng Lian <lian@databricks.com>    2015-09-26 19:08:55 -0700
commit    6f94d56a95e8c3a410a8d0c6a24ccca043227ba9
tree      78c3dc4253db226fc04a9d8d7cf1af15d54d8d0f /sql/hive-thriftserver/src
parent    6fcee906d2afb5d5c3c49e0a669637a87e82b910
[SPARK-10845] [SQL] Makes spark.sql.hive.version a SQLConfEntry
When refactoring SQL options from plain strings to the strongly typed `SQLConfEntry`, `spark.sql.hive.version` wasn't migrated, so it doesn't show up in the result of `SET -v`, which only lists public `SQLConfEntry` instances. This affects compatibility with the Simba ODBC driver. This PR migrates the option to a `SQLConfEntry` to fix the issue.

Author: Cheng Lian <lian@databricks.com>

Closes #8925 from liancheng/spark-10845/hive-version-conf.
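To make the behavior concrete, here is a standalone toy sketch in Scala. It is a simplified model, not Spark's actual SQLConfEntry/SQLConf API (the ConfEntry class and SetVDemo object below are invented for illustration); it only shows why an option must be a registered public entry before "SET -v" lists it, while plain "SET" also reflects raw string settings.

    // Toy model only (assumption): simplified stand-ins for Spark's typed
    // config entries; not the real SQLConfEntry class.
    case class ConfEntry(key: String, default: Option[String], doc: String, isPublic: Boolean)

    object SetVDemo {
      // Registered, strongly typed entries -- the only things "SET -v" enumerates.
      val entries = Seq(
        ConfEntry("spark.sql.hive.version", Some("1.2.1"),
          "Version of Hive used internally by Spark SQL.", isPublic = true))

      // Plain string settings, e.g. values set with setConf("key", "value").
      // Visible to "SET", invisible to "SET -v" unless also registered above.
      val rawSettings = Map("spark.sql.hive.version" -> "1.2.1")

      def setV(): Seq[(String, String)] =
        entries.filter(_.isPublic).map(e => e.key -> e.default.getOrElse("<undefined>"))

      def set(): Seq[(String, String)] = rawSettings.toSeq

      def main(args: Array[String]): Unit = {
        println("SET -v -> " + setV().mkString(", "))
        println("SET    -> " + set().mkString(", "))
      }
    }

Before this patch, the Hive version option behaved like the rawSettings entry above: set as a plain string, so visible to SET but missing from SET -v, which is what tripped up ODBC clients probing it.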
Diffstat (limited to 'sql/hive-thriftserver/src')
-rw-r--r--  sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala        |  1 -
-rw-r--r--  sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala  | 27 +
2 files changed, 27 insertions(+), 1 deletion(-)
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
index a0643cec0f..a4fd0c3ce9 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2.scala
@@ -55,7 +55,6 @@ object HiveThriftServer2 extends Logging {
   @DeveloperApi
   def startWithContext(sqlContext: HiveContext): Unit = {
     val server = new HiveThriftServer2(sqlContext)
-    sqlContext.setConf("spark.sql.hive.version", HiveContext.hiveExecutionVersion)
     server.init(sqlContext.hiveconf)
     server.start()
     listener = new HiveThriftServer2Listener(server, sqlContext.conf)
diff --git a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
index b72249b3bf..19b2f24456 100644
--- a/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
+++ b/sql/hive-thriftserver/src/test/scala/org/apache/spark/sql/hive/thriftserver/HiveThriftServer2Suites.scala
@@ -21,6 +21,7 @@ import java.io.File
 import java.net.URL
 import java.sql.{Date, DriverManager, SQLException, Statement}

+import scala.collection.mutable
 import scala.collection.mutable.ArrayBuffer
 import scala.concurrent.ExecutionContext.Implicits.global
 import scala.concurrent.duration._
@@ -431,6 +432,32 @@ class HiveThriftBinaryServerSuite extends HiveThriftJdbcTest {
       }
     )
   }
+
+  test("Checks Hive version via SET -v") {
+    withJdbcStatement { statement =>
+      val resultSet = statement.executeQuery("SET -v")
+
+      val conf = mutable.Map.empty[String, String]
+      while (resultSet.next()) {
+        conf += resultSet.getString(1) -> resultSet.getString(2)
+      }
+
+      assert(conf.get("spark.sql.hive.version") === Some("1.2.1"))
+    }
+  }
+
+  test("Checks Hive version via SET") {
+    withJdbcStatement { statement =>
+      val resultSet = statement.executeQuery("SET")
+
+      val conf = mutable.Map.empty[String, String]
+      while (resultSet.next()) {
+        conf += resultSet.getString(1) -> resultSet.getString(2)
+      }
+
+      assert(conf.get("spark.sql.hive.version") === Some("1.2.1"))
+    }
+  }
 }

 class HiveThriftHttpServerSuite extends HiveThriftJdbcTest {
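For reference, below is a hedged standalone sketch of how an external client could perform the same check as the new tests over plain JDBC. Nothing in it comes from the patch: the CheckHiveVersion object name, the host/port, and the credentials are assumptions, and it presumes a running Spark Thrift server on localhost:10000 without authentication plus the Hive JDBC driver on the classpath.

    import java.sql.DriverManager

    object CheckHiveVersion {
      def main(args: Array[String]): Unit = {
        // Hypothetical connection details; adjust host, port, and credentials.
        Class.forName("org.apache.hive.jdbc.HiveDriver")
        val conn = DriverManager.getConnection("jdbc:hive2://localhost:10000", "user", "")
        try {
          // As in the suite above, each result row is a (key, value) pair.
          val rs = conn.createStatement().executeQuery("SET -v")
          while (rs.next()) {
            if (rs.getString(1) == "spark.sql.hive.version") {
              println(s"spark.sql.hive.version = ${rs.getString(2)}")
            }
          }
        } finally {
          conn.close()
        }
      }
    }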