From 261c55dd8808502fb7f3384eb537d26a4a8123d7 Mon Sep 17 00:00:00 2001
From: gatorsmile
Date: Fri, 26 Aug 2016 11:19:03 -0700
Subject: [SPARK-17250][SQL] Remove HiveClient and setCurrentDatabase from HiveSessionCatalog

### What changes were proposed in this pull request?
This is the first step toward removing `HiveClient` from `HiveSessionState`. In the
metastore interaction, we always use the fully qualified table name when
accessing or operating on a table; that is, we always specify the database.
Thus, it is not necessary to use `HiveClient` to change the active database in
the Hive metastore.

In `HiveSessionCatalog`, `setCurrentDatabase` is the only function that uses
`HiveClient`. Thus, once `setCurrentDatabase` is removed, the `HiveClient`
reference can be removed as well.

### How was this patch tested?
The existing test cases.

Author: gatorsmile

Closes #14821 from gatorsmile/setCurrentDB.
---
 .../main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala | 7 -------
 .../main/scala/org/apache/spark/sql/hive/HiveSessionState.scala   | 1 -
 2 files changed, 8 deletions(-)

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 86d3b6de0d..bfa5899faf 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -34,7 +34,6 @@ import org.apache.spark.sql.catalyst.expressions.{Cast, Expression, ExpressionIn
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.rules.Rule
 import org.apache.spark.sql.hive.HiveShim.HiveFunctionWrapper
-import org.apache.spark.sql.hive.client.HiveClient
 import org.apache.spark.sql.internal.SQLConf
 import org.apache.spark.sql.types.{DecimalType, DoubleType}
 import org.apache.spark.util.Utils
@@ -42,7 +41,6 @@ import org.apache.spark.util.Utils
 
 private[sql] class HiveSessionCatalog(
     externalCatalog: HiveExternalCatalog,
-    client: HiveClient,
     sparkSession: SparkSession,
     functionResourceLoader: FunctionResourceLoader,
     functionRegistry: FunctionRegistry,
@@ -55,11 +53,6 @@ private[sql] class HiveSessionCatalog(
     conf,
     hadoopConf) {
 
-  override def setCurrentDatabase(db: String): Unit = {
-    super.setCurrentDatabase(db)
-    client.setCurrentDatabase(db)
-  }
-
   override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = {
     val table = formatTableName(name.table)
     if (name.database.isDefined || !tempTables.contains(table)) {
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
index f3c4135da6..15e1255653 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionState.scala
@@ -45,7 +45,6 @@ private[hive] class HiveSessionState(sparkSession: SparkSession)
   override lazy val catalog = {
     new HiveSessionCatalog(
       sparkSession.sharedState.externalCatalog.asInstanceOf[HiveExternalCatalog],
-      metadataHive,
       sparkSession,
       functionResourceLoader,
       functionRegistry,
--
cgit v1.2.3
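
For illustration only (not part of the patch): a minimal, self-contained Scala sketch of the reasoning in the commit message. `MetastoreClient` and `SimpleSessionCatalog` below are hypothetical stand-ins for `HiveClient` and `SessionCatalog`, not Spark classes. The point is that when every metastore call carries a fully qualified table name, the client never needs to track the session's current database, so an override of `setCurrentDatabase` that forwards to the client becomes unnecessary.

```scala
// Hypothetical stand-ins (not Spark classes) sketching why the client
// reference can be dropped once setCurrentDatabase no longer forwards to it.

// A metastore client that only ever receives fully qualified names, so it
// has no notion of a "current" database.
class MetastoreClient {
  def getTable(db: String, table: String): String = s"$db.$table"
}

// A session-side catalog: the current database is pure session state.
class SimpleSessionCatalog(client: MetastoreClient, defaultDb: String = "default") {
  private var currentDb: String = defaultDb

  // Only session state changes here; the metastore client is never told.
  def setCurrentDatabase(db: String): Unit = {
    currentDb = db
  }

  // Unqualified names are qualified with the session's current database
  // before the metastore is consulted.
  def lookupTable(table: String, db: Option[String] = None): String =
    client.getTable(db.getOrElse(currentDb), table)
}

object CurrentDatabaseDemo extends App {
  val catalog = new SimpleSessionCatalog(new MetastoreClient)
  catalog.setCurrentDatabase("sales")
  println(catalog.lookupTable("orders"))             // prints: sales.orders
  println(catalog.lookupTable("users", Some("crm"))) // prints: crm.users
}
```

Under this assumption, switching the database is a session-local operation, which is what the base `SessionCatalog` behavior that the patch falls back to provides.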