diff options
3 files changed, 20 insertions, 6 deletions
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala index 714ef825ab..2d2120dda8 100644 --- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala +++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala @@ -160,8 +160,6 @@ class SessionCatalog( val dbName = formatDatabaseName(db) if (dbName == DEFAULT_DATABASE) { throw new AnalysisException(s"Can not drop default database") - } else if (dbName == getCurrentDatabase) { - throw new AnalysisException(s"Can not drop current database `$dbName`") } externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade) } diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala index 187611bc77..b77fef225a 100644 --- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala +++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala @@ -127,6 +127,21 @@ class SessionCatalogSuite extends SparkFunSuite { catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false) } + test("drop current database and drop default database") { + val catalog = new SessionCatalog(newBasicCatalog()) + catalog.setCurrentDatabase("db1") + assert(catalog.getCurrentDatabase == "db1") + catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true) + intercept[NoSuchDatabaseException] { + catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false) + } + catalog.setCurrentDatabase("default") + assert(catalog.getCurrentDatabase == "default") + intercept[AnalysisException] { + catalog.dropDatabase("default", ignoreIfNotExists = false, cascade = true) + } + } + test("alter database") { val catalog = 
new SessionCatalog(newBasicCatalog()) val db1 = catalog.getDatabaseMetadata("db1") diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala index 22d4c929bf..d4d001497d 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala @@ -1599,10 +1599,11 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach { test("drop current database") { sql("CREATE DATABASE temp") sql("USE temp") - val m = intercept[AnalysisException] { - sql("DROP DATABASE temp") - }.getMessage - assert(m.contains("Can not drop current database `temp`")) + sql("DROP DATABASE temp") + val e = intercept[AnalysisException] { + sql("CREATE TABLE t (a INT, b INT)") + }.getMessage + assert(e.contains("Database 'temp' not found")) } test("drop default database") {