From 96cc1b5675273c276e04c4dc19ef9033a314292d Mon Sep 17 00:00:00 2001
From: Daoyuan Wang
Date: Thu, 3 Nov 2016 00:18:03 -0700
Subject: [SPARK-17122][SQL] support drop current database

## What changes were proposed in this pull request?

In Spark 1.6 and earlier, we could drop the database we are currently using. In Spark 2.0, the native implementation prevents us from dropping the current database, which may break some old queries. This PR re-enables the feature.

## How was this patch tested?

One new unit test in `SessionCatalogSuite`.

Author: Daoyuan Wang

Closes #15011 from adrian-wang/dropcurrent.
---
 .../spark/sql/catalyst/catalog/SessionCatalog.scala      |  2 --
 .../spark/sql/catalyst/catalog/SessionCatalogSuite.scala | 15 +++++++++++++++
 2 files changed, 15 insertions(+), 2 deletions(-)

diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 714ef825ab..2d2120dda8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -160,8 +160,6 @@ class SessionCatalog(
     val dbName = formatDatabaseName(db)
     if (dbName == DEFAULT_DATABASE) {
       throw new AnalysisException(s"Can not drop default database")
-    } else if (dbName == getCurrentDatabase) {
-      throw new AnalysisException(s"Can not drop current database `$dbName`")
     }
     externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
   }
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 187611bc77..b77fef225a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -127,6 +127,21 @@ class SessionCatalogSuite extends SparkFunSuite {
     catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
   }
 
+  test("drop current database and drop default database") {
+    val catalog = new SessionCatalog(newBasicCatalog())
+    catalog.setCurrentDatabase("db1")
+    assert(catalog.getCurrentDatabase == "db1")
+    catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true)
+    intercept[NoSuchDatabaseException] {
+      catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false)
+    }
+    catalog.setCurrentDatabase("default")
+    assert(catalog.getCurrentDatabase == "default")
+    intercept[AnalysisException] {
+      catalog.dropDatabase("default", ignoreIfNotExists = false, cascade = true)
+    }
+  }
+
   test("alter database") {
     val catalog = new SessionCatalog(newBasicCatalog())
     val db1 = catalog.getDatabaseMetadata("db1")
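
For context (not part of the patch): a minimal sketch of the behavior this change restores, mirroring the new unit test. It assumes it runs inside a ScalaTest suite such as `SessionCatalogSuite`, so the test helpers `newBasicCatalog()` / `newTable()` and ScalaTest's `intercept` are in scope.

```scala
// Minimal sketch, assuming SessionCatalogSuite's test helpers
// (newBasicCatalog, newTable) and ScalaTest's intercept are available.
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException
import org.apache.spark.sql.catalyst.catalog.SessionCatalog

val catalog = new SessionCatalog(newBasicCatalog())

catalog.setCurrentDatabase("db1")
// Before this patch: AnalysisException("Can not drop current database `db1`").
// After this patch: dropping the current database succeeds.
catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true)

// Using the dropped (still current) database afterwards fails as expected.
intercept[NoSuchDatabaseException] {
  catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false)
}

// The default database still cannot be dropped.
intercept[AnalysisException] {
  catalog.dropDatabase("default", ignoreIfNotExists = false, cascade = true)
}
```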