author    Daoyuan Wang <daoyuan.wang@intel.com>    2016-11-03 00:18:03 -0700
committer gatorsmile <gatorsmile@gmail.com>        2016-11-03 00:18:03 -0700
commit    96cc1b5675273c276e04c4dc19ef9033a314292d (patch)
tree      573b40e8132f00cf0d0fa18829750320abefafba /sql/catalyst
parent    d24e736471f34ef8f2c12766393379c4213fe96e (diff)
download  spark-96cc1b5675273c276e04c4dc19ef9033a314292d.tar.gz
          spark-96cc1b5675273c276e04c4dc19ef9033a314292d.tar.bz2
          spark-96cc1b5675273c276e04c4dc19ef9033a314292d.zip
[SPARK-17122][SQL] support drop current database
## What changes were proposed in this pull request?

In Spark 1.6 and earlier, we could drop the database we were currently using. In Spark 2.0, the native implementation prevents us from dropping the current database, which may break some old queries. This PR re-enables the feature.

## How was this patch tested?

One new unit test in `SessionCatalogSuite`.

Author: Daoyuan Wang <daoyuan.wang@intel.com>

Closes #15011 from adrian-wang/dropcurrent.
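For illustration, a minimal sketch of the re-enabled behavior at the SQL level (assuming an active `SparkSession` named `spark`; the database name `db1` is a hypothetical example, not part of this patch):

```scala
// Create and switch into an example database.
spark.sql("CREATE DATABASE db1")
spark.sql("USE db1")
// Before this patch, Spark 2.0 rejected the statement below with
// "Can not drop current database `db1`"; with it, the drop succeeds.
spark.sql("DROP DATABASE db1 CASCADE")
```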
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala      |  2
-rw-r--r--  sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala | 15
2 files changed, 15 insertions(+), 2 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 714ef825ab..2d2120dda8 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -160,8 +160,6 @@ class SessionCatalog(
     val dbName = formatDatabaseName(db)
     if (dbName == DEFAULT_DATABASE) {
       throw new AnalysisException(s"Can not drop default database")
-    } else if (dbName == getCurrentDatabase) {
-      throw new AnalysisException(s"Can not drop current database `$dbName`")
     }
     externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
   }
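For context, the method after this change reads roughly as follows (a sketch reconstructed from the hunk above and the test's call site; surrounding members of `SessionCatalog` are elided):

```scala
def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
  val dbName = formatDatabaseName(db)
  // Only the default database stays protected; the current database may now be dropped.
  if (dbName == DEFAULT_DATABASE) {
    throw new AnalysisException(s"Can not drop default database")
  }
  externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
}
```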
diff --git a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
index 187611bc77..b77fef225a 100644
--- a/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
+++ b/sql/catalyst/src/test/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalogSuite.scala
@@ -127,6 +127,21 @@ class SessionCatalogSuite extends SparkFunSuite {
     catalog.dropDatabase("db_that_does_not_exist", ignoreIfNotExists = true, cascade = false)
   }
 
+  test("drop current database and drop default database") {
+    val catalog = new SessionCatalog(newBasicCatalog())
+    catalog.setCurrentDatabase("db1")
+    assert(catalog.getCurrentDatabase == "db1")
+    catalog.dropDatabase("db1", ignoreIfNotExists = false, cascade = true)
+    intercept[NoSuchDatabaseException] {
+      catalog.createTable(newTable("tbl1", "db1"), ignoreIfExists = false)
+    }
+    catalog.setCurrentDatabase("default")
+    assert(catalog.getCurrentDatabase == "default")
+    intercept[AnalysisException] {
+      catalog.dropDatabase("default", ignoreIfNotExists = false, cascade = true)
+    }
+  }
+
   test("alter database") {
     val catalog = new SessionCatalog(newBasicCatalog())
     val db1 = catalog.getDatabaseMetadata("db1")