about summary refs log tree commit diff
path: root/sql/hive
diff options
context:
space:
mode:
author	gatorsmile <gatorsmile@gmail.com>	2016-08-26 11:19:03 -0700
committer	Yin Huai <yhuai@databricks.com>	2016-08-26 11:19:03 -0700
commit	261c55dd8808502fb7f3384eb537d26a4a8123d7 (patch)
tree	ab8810fdacc2451fdf19a97d5ea8b16741dc3532 /sql/hive
parent	fd4ba3f626f49d7d616a2a334d45b1c736e1db1c (diff)
download	spark-261c55dd8808502fb7f3384eb537d26a4a8123d7.tar.gz
spark-261c55dd8808502fb7f3384eb537d26a4a8123d7.tar.bz2
spark-261c55dd8808502fb7f3384eb537d26a4a8123d7.zip
[SPARK-17250][SQL] Remove HiveClient and setCurrentDatabase from HiveSessionCatalog
### What changes were proposed in this pull request? This is the first step to remove `HiveClient` from `HiveSessionState`. In the metastore interaction, we always use the fully qualified table name when accessing/operating a table. That means, we always specify the database. Thus, it is not necessary to use `HiveClient` to change the active database in Hive metastore. In `HiveSessionCatalog`, `setCurrentDatabase` is the only function that uses `HiveClient`. Thus, we can remove it after removing `setCurrentDatabase`. ### How was this patch tested? The existing test cases. Author: gatorsmile <gatorsmile@gmail.com> Closes #14821 from gatorsmile/setCurrentDB.
Diffstat (limited to 'sql/hive')
-rw-r--r--	sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala	| 7 -------
-rw-r--r--	sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala	| 1 -
2 files changed, 0 insertions, 8 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 86d3b6de0d..bfa5899faf 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, ExpressionIn
import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
import org.apache.spark.sql.catalyst.rules.Rule
import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
-import org.apache.spark.sql.hive.client.HiveClient
import org.apache.spark.sql.internal.SQLConf
import org.apache.spark.sql.types.{DecimalType, DoubleType}
import org.apache.spark.util.Utils
@@ -42,7 +41,6 @@ import org.apache.spark.util.Utils
private[sql] class HiveSessionCatalog(
externalCatalog: HiveExternalCatalog,
- client: HiveClient,
sparkSession: SparkSession,
functionResourceLoader: FunctionResourceLoader,
functionRegistry: FunctionRegistry,
@@ -55,11 +53,6 @@ private[sql] class HiveSessionCatalog(
conf,
hadoopConf) {
- override def setCurrentDatabase(db: String): Unit = {
- super.setCurrentDatabase(db)
- client.setCurrentDatabase(db)
- }
-
override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = {
val table = formatTableName(name.table)
if (name.database.isDefined || !tempTables.contains(table)) {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index f3c4135da6..15e1255653 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -45,7 +45,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
override lazy val catalog = {
new HiveSessionCatalog(
sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog],
- metadataHive,
sparkSession,
functionResourceLoader,
functionRegistry,