author     Reynold Xin <rxin@databricks.com>   2016-04-21 11:42:25 -0700
committer  Reynold Xin <rxin@databricks.com>   2016-04-21 11:42:25 -0700
commit     3a21e8d5ed640e3f82946893e24c099aa723c169 (patch)
tree       f67f237c69f42f92d74d05ba3e835257116fb981
parent     79008e6cfd0c93a09e520850306dad347c1ad3b0 (diff)
[SPARK-14795][SQL] Remove the use of Hive's variable substitution
## What changes were proposed in this pull request?

This patch builds on #12556 and completely removes the use of Hive's variable substitution.

## How was this patch tested?

Covered by existing tests.

Author: Reynold Xin <rxin@databricks.com>

Closes #12561 from rxin/SPARK-14795.
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala           3
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala   12
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala    4
3 files changed, 8 insertions, 11 deletions
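The user-visible effect is that substitution is now toggled through Spark's own `spark.sql.variable.substitute` conf rather than Hive's `hive.variable.substitute`, as the test change further down also shows. A minimal sketch of exercising it, assuming a Hive-enabled SparkSession (the entry point and app name are illustrative, not part of this commit):

```scala
import org.apache.spark.sql.SparkSession

// Sketch only: exercises SQL variable substitution after this change.
// The SparkSession entry point and app name are assumptions for illustration.
object SubstitutionDemo {
  def main(args: Array[String]): Unit = {
    val spark = SparkSession.builder()
      .appName("variable-substitution-sketch")
      .enableHiveSupport()
      .getOrCreate()

    // Define a variable, then reference it with ${hiveconf:...} syntax;
    // it is expanded to `src` before the text reaches the parser.
    spark.sql("SET tbl=src")
    spark.sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key LIMIT 1").show()

    // The toggle is now Spark's own conf key, not hive.variable.substitute.
    // With substitution disabled, the literal ${hiveconf:tbl} reaches the
    // parser and the query fails to resolve a table by that name.
    spark.sql("SET spark.sql.variable.substitute=false")

    spark.stop()
  }
}
```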
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index 171def43b5..6f4332c65f 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -21,7 +21,6 @@ import java.util.regex.Pattern
import org.apache.hadoop.hive.conf.HiveConf
import org.apache.hadoop.hive.conf.HiveConf.ConfVars
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
import org.apache.spark.sql._
import org.apache.spark.sql.catalyst.analysis.Analyzer
@@ -109,7 +108,7 @@ private[hive] class HiveSessionState(ctx: SQLContext) extends SessionState(ctx)
/**
* Parser for HiveQl query texts.
*/
- override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf, hiveconf)
+ override lazy val sqlParser: ParserInterface = new HiveSqlParser(conf)
/**
* Planner that takes into account Hive-specific strategies.
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
index 1c4cda7109..989da92bc7 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/execution/HiveSqlParser.scala
@@ -20,8 +20,6 @@ package org.apache.spark.sql.hive.execution
import scala.util.Try
import org.antlr.v4.runtime.Token
-import org.apache.hadoop.hive.conf.HiveConf
-import org.apache.hadoop.hive.ql.parse.VariableSubstitution
import org.apache.hadoop.hive.serde.serdeConstants
import org.apache.spark.sql.catalyst.catalog._
@@ -29,23 +27,23 @@ import org.apache.spark.sql.catalyst.parser._
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
import org.apache.spark.sql.execution.SparkSqlAstBuilder
-import org.apache.spark.sql.internal.SQLConf
+import org.apache.spark.sql.internal.{SQLConf, VariableSubstitution}
/**
* Concrete parser for HiveQl statements.
*/
-class HiveSqlParser(conf: SQLConf, hiveconf: HiveConf) extends AbstractSqlParser {
+class HiveSqlParser(conf: SQLConf) extends AbstractSqlParser {
val astBuilder = new HiveSqlAstBuilder(conf)
- lazy val substitutor = new VariableSubstitution
+ private val substitutor = new VariableSubstitution(conf)
protected override def parse[T](command: String)(toResult: SqlBaseParser => T): T = {
- super.parse(substitutor.substitute(hiveconf, command))(toResult)
+ super.parse(substitutor.substitute(command))(toResult)
}
protected override def nativeCommand(sqlText: String): LogicalPlan = {
- HiveNativeCommand(substitutor.substitute(hiveconf, sqlText))
+ HiveNativeCommand(substitutor.substitute(sqlText))
}
}
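To make the new dependency concrete without reproducing Spark's source, here is a minimal, self-contained sketch of what a SQLConf-driven substitutor could look like. It is not the actual `org.apache.spark.sql.internal.VariableSubstitution` implementation; it only illustrates the behavior `HiveSqlParser` now relies on: when substitution is enabled, `${hiveconf:x}`, `${system:x}`, `${env:x}`, and bare `${x}` references are expanded before the command is parsed.

```scala
import scala.util.matching.Regex

// Sketch only: NOT the real org.apache.spark.sql.internal.VariableSubstitution.
// `getConf` and `substitutionEnabled` stand in for SQLConf lookups.
class SketchVariableSubstitution(
    getConf: String => Option[String],
    substitutionEnabled: () => Boolean) {

  // Matches ${hiveconf:name}, ${system:name}, ${env:name}, and plain ${name}.
  private val pattern: Regex = """\$\{(hiveconf:|system:|env:)?([^}]+)\}""".r

  def substitute(input: String): String = {
    if (!substitutionEnabled()) {
      input
    } else {
      pattern.replaceAllIn(input, m => {
        val prefix = Option(m.group(1)).getOrElse("")
        val name = m.group(2)
        val value = prefix match {
          case "system:" => Option(System.getProperty(name))
          case "env:"    => Option(System.getenv(name))
          case _         => getConf(name) // hiveconf: and bare names read the session conf
        }
        // Leave unresolved references untouched rather than failing here.
        Regex.quoteReplacement(value.getOrElse(m.matched))
      })
    }
  }
}
```

A call such as `new SketchVariableSubstitution(k => conf.get(k), () => enabled).substitute(sqlText)` mirrors how the parser above now invokes `substitutor.substitute(command)` before handing the text to `super.parse`.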
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index b97e9fe35c..345ee8ef28 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -512,13 +512,13 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
sql("SELECT key FROM ${hiveconf:tbl} ORDER BY key, value limit 1"),
sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)
- sql("set hive.variable.substitute=false") // disable the substitution
+ sql("set spark.sql.variable.substitute=false") // disable the substitution
sql("set tbl2=src")
intercept[Exception] {
sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1").collect()
}
- sql("set hive.variable.substitute=true") // enable the substitution
+ sql("set spark.sql.variable.substitute=true") // enable the substitution
checkAnswer(
sql("SELECT key FROM ${hiveconf:tbl2} ORDER BY key, value limit 1"),
sql("SELECT key FROM src ORDER BY key, value limit 1").collect().toSeq)