aboutsummaryrefslogtreecommitdiff
path: root/sql/hive-thriftserver
diff options
context:
space:
mode:
authorRyan Blue <blue@apache.org>2016-11-07 17:36:15 -0800
committerReynold Xin <rxin@databricks.com>2016-11-07 17:36:15 -0800
commit9b0593d5e99bb919c4abb8d0836a126ec2eaf1d5 (patch)
treef8de2bf87e557fc312351ef1d281473ec99ee98f /sql/hive-thriftserver
parent3eda05703f02413540f180ade01f0f114e70b9cc (diff)
downloadspark-9b0593d5e99bb919c4abb8d0836a126ec2eaf1d5.tar.gz
spark-9b0593d5e99bb919c4abb8d0836a126ec2eaf1d5.tar.bz2
spark-9b0593d5e99bb919c4abb8d0836a126ec2eaf1d5.zip
[SPARK-18086] Add support for Hive session vars.
## What changes were proposed in this pull request?

This adds support for Hive variables:

* Makes values set via `spark-sql --hivevar name=value` accessible
* Adds `getHiveVar` and `setHiveVar` to the `HiveClient` interface
* Adds a SessionVariables trait for sessions like Hive that support variables (including Hive vars)
* Adds SessionVariables support to variable substitution
* Adds SessionVariables support to the SET command

## How was this patch tested?

* Adds a test to all supported Hive versions for accessing Hive variables
* Adds HiveVariableSubstitutionSuite

Author: Ryan Blue <blue@apache.org>

Closes #15738 from rdblue/SPARK-18086-add-hivevar-support.
Diffstat (limited to 'sql/hive-thriftserver')
-rw-r--r--sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala6
1 file changed, 5 insertions, 1 deletion
diff --git a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
index 5dafec1c30..0c79b6f421 100644
--- a/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
+++ b/sql/hive-thriftserver/src/main/scala/org/apache/spark/sql/hive/thriftserver/SparkSQLCLIDriver.scala
@@ -38,7 +38,7 @@ import org.apache.thrift.transport.TSocket
import org.apache.spark.internal.Logging
import org.apache.spark.sql.AnalysisException
-import org.apache.spark.sql.hive.HiveUtils
+import org.apache.spark.sql.hive.{HiveSessionState, HiveUtils}
import org.apache.spark.util.ShutdownHookManager
/**
@@ -291,6 +291,10 @@ private[hive] class SparkSQLCLIDriver extends CliDriver with Logging {
throw new RuntimeException("Remote operations not supported")
}
+ override def setHiveVariables(hiveVariables: java.util.Map[String, String]): Unit = {
+ hiveVariables.asScala.foreach(kv => SparkSQLEnv.sqlContext.conf.setConfString(kv._1, kv._2))
+ }
+
override def processCmd(cmd: String): Int = {
val cmd_trimmed: String = cmd.trim()
val cmd_lower = cmd_trimmed.toLowerCase(Locale.ENGLISH)