From 43f4fd6f9bfff749af17e3c65b53a33f5ecb0922 Mon Sep 17 00:00:00 2001
From: Wenchen Fan
Date: Thu, 4 Aug 2016 16:48:30 +0800
Subject: [SPARK-16867][SQL] createTable and alterTable in ExternalCatalog
 should not take db

## What changes were proposed in this pull request?

These two methods take a `CatalogTable` as a parameter, which already contains the database information.

## How was this patch tested?

Existing tests.

Author: Wenchen Fan

Closes #14476 from cloud-fan/minor5.
---
 .../org/apache/spark/sql/hive/HiveExternalCatalog.scala | 17 +++++------------
 .../spark/sql/hive/MetastoreDataSourcesSuite.scala      |  2 +-
 2 files changed, 6 insertions(+), 13 deletions(-)

(limited to 'sql/hive/src')

diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index cf2b92fb89..8302e3e98a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -77,14 +77,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
     }
   }
 
-  private def requireDbMatches(db: String, table: CatalogTable): Unit = {
-    if (table.identifier.database != Some(db)) {
-      throw new AnalysisException(
-        s"Provided database '$db' does not match the one specified in the " +
-          s"table definition (${table.identifier.database.getOrElse("n/a")})")
-    }
-  }
-
   private def requireTableExists(db: String, table: String): Unit = {
     withClient { getTable(db, table) }
   }
@@ -147,11 +139,11 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
   // --------------------------------------------------------------------------
 
   override def createTable(
-      db: String,
       tableDefinition: CatalogTable,
       ignoreIfExists: Boolean): Unit = withClient {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireDbExists(db)
-    requireDbMatches(db, tableDefinition)
 
     if (
     // If this is an external data source table...
@@ -211,8 +203,9 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
    * Note: As of now, this only supports altering table properties, serde properties,
    * and num buckets!
    */
-  override def alterTable(db: String, tableDefinition: CatalogTable): Unit = withClient {
-    requireDbMatches(db, tableDefinition)
+  override def alterTable(tableDefinition: CatalogTable): Unit = withClient {
+    assert(tableDefinition.identifier.database.isDefined)
+    val db = tableDefinition.identifier.database.get
     requireTableExists(db, tableDefinition.identifier.table)
     client.alterTable(tableDefinition)
   }
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index c87bda9047..c36b0275f4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -741,7 +741,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
         DATASOURCE_SCHEMA -> schema.json,
         "EXTERNAL" -> "FALSE"))
 
-    sharedState.externalCatalog.createTable("default", hiveTable, ignoreIfExists = false)
+    sharedState.externalCatalog.createTable(hiveTable, ignoreIfExists = false)
     sessionState.refreshTable(tableName)
 
     val actualSchema = table(tableName).schema
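
Addendum (not part of the original patch): a minimal Scala sketch of the idea behind this change. The `TableIdentifier` held by every `CatalogTable` already carries an optional database name, so the separate `db` argument was redundant; the patch recovers the database from the identifier instead. The object name and the identifier values below are hypothetical; only `TableIdentifier` and its `database` field come from Spark.

```scala
import org.apache.spark.sql.catalyst.TableIdentifier

object IdentifierCarriesDb extends App {
  // Hypothetical identifier: the database travels inside the identifier,
  // which is exactly what CatalogTable.identifier holds.
  val ident = TableIdentifier("my_table", Some("default"))

  // The same pattern the patch uses inside createTable/alterTable to
  // recover the database instead of taking it as a separate parameter:
  assert(ident.database.isDefined)
  val db: String = ident.database.get
  println(db) // prints "default"
}
```

This also explains why the methods assert rather than validate against a caller-supplied name: with a single source of truth there is no second value to mismatch, so the old `requireDbMatches` check becomes unnecessary.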