aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorWenchen Fan <wenchen@databricks.com>2016-08-04 16:48:30 +0800
committerCheng Lian <lian@databricks.com>2016-08-04 16:48:30 +0800
commit43f4fd6f9bfff749af17e3c65b53a33f5ecb0922 (patch)
treea9625bdc9b5c5c5851a2e50c2cdbd8b7f8a71a67 /sql/hive
parent27e815c31de26636df089b0b8d9bd678b92d3588 (diff)
downloadspark-43f4fd6f9bfff749af17e3c65b53a33f5ecb0922.tar.gz
spark-43f4fd6f9bfff749af17e3c65b53a33f5ecb0922.tar.bz2
spark-43f4fd6f9bfff749af17e3c65b53a33f5ecb0922.zip
[SPARK-16867][SQL] createTable and alterTable in ExternalCatalog should not take db
## What changes were proposed in this pull request? These 2 methods take a `CatalogTable` as a parameter, which already has the database information. ## How was this patch tested? Existing tests. Author: Wenchen Fan <wenchen@databricks.com> Closes #14476 from cloud-fan/minor5.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala17
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala2
2 files changed, 6 insertions, 13 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
index cf2b92fb89..8302e3e98a 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveExternalCatalog.scala
@@ -77,14 +77,6 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
}
}
- private def requireDbMatches(db: String, table: CatalogTable): Unit = {
- if (table.identifier.database != Some(db)) {
- throw new AnalysisException(
- s"Provided database '$db' does not match the one specified in the " +
- s"table definition (${table.identifier.database.getOrElse("n/a")})")
- }
- }
-
private def requireTableExists(db: String, table: String): Unit = {
withClient { getTable(db, table) }
}
@@ -147,11 +139,11 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
// --------------------------------------------------------------------------
override def createTable(
- db: String,
tableDefinition: CatalogTable,
ignoreIfExists: Boolean): Unit = withClient {
+ assert(tableDefinition.identifier.database.isDefined)
+ val db = tableDefinition.identifier.database.get
requireDbExists(db)
- requireDbMatches(db, tableDefinition)
if (
// If this is an external data source table...
@@ -211,8 +203,9 @@ private[spark] class HiveExternalCatalog(client: HiveClient, hadoopConf: Configu
* Note: As of now, this only supports altering table properties, serde properties,
* and num buckets!
*/
- override def alterTable(db: String, tableDefinition: CatalogTable): Unit = withClient {
- requireDbMatches(db, tableDefinition)
+ override def alterTable(tableDefinition: CatalogTable): Unit = withClient {
+ assert(tableDefinition.identifier.database.isDefined)
+ val db = tableDefinition.identifier.database.get
requireTableExists(db, tableDefinition.identifier.table)
client.alterTable(tableDefinition)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
index c87bda9047..c36b0275f4 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/MetastoreDataSourcesSuite.scala
@@ -741,7 +741,7 @@ class MetastoreDataSourcesSuite extends QueryTest with SQLTestUtils with TestHiv
DATASOURCE_SCHEMA -> schema.json,
"EXTERNAL" -> "FALSE"))
- sharedState.externalCatalog.createTable("default", hiveTable, ignoreIfExists = false)
+ sharedState.externalCatalog.createTable(hiveTable, ignoreIfExists = false)
sessionState.refreshTable(tableName)
val actualSchema = table(tableName).schema