aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
author	Dongjoon Hyun <dongjoon@apache.org>	2016-07-11 15:15:47 +0200
committer	Herman van Hovell <hvanhovell@databricks.com>	2016-07-11 15:15:47 +0200
commit7ac79da0e4607f7f89a3617edf53c2b174b378e8 (patch)
tree45945e49be9c406a8bbfc935f439d14098be9dbc /sql/hive
parent9cb1eb7af779e74165552977002158a7dad9bb09 (diff)
downloadspark-7ac79da0e4607f7f89a3617edf53c2b174b378e8.tar.gz
spark-7ac79da0e4607f7f89a3617edf53c2b174b378e8.tar.bz2
spark-7ac79da0e4607f7f89a3617edf53c2b174b378e8.zip
[SPARK-16459][SQL] Prevent dropping current database
## What changes were proposed in this pull request?

This PR prevents dropping the current database to avoid errors like the following.

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
scala> sql("create table t as select 1")
org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException: Database `delete_db` not found;
```

## How was this patch tested?

Pass the Jenkins tests including an updated testcase.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #14115 from dongjoon-hyun/SPARK-16459.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala1
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala2
2 files changed, 3 insertions, 0 deletions
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
index 3aa8174702..57363b7259 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/HiveContextCompatibilitySuite.scala
@@ -93,6 +93,7 @@ class HiveContextCompatibilitySuite extends SparkFunSuite with BeforeAndAfterEac
hc.sql("DROP TABLE mee_table")
val tables2 = hc.sql("SHOW TABLES IN mee_db").collect().map(_.getString(0))
assert(tables2.isEmpty)
+ hc.sql("USE default")
hc.sql("DROP DATABASE mee_db CASCADE")
val databases3 = hc.sql("SHOW DATABASES").collect().map(_.getString(0))
assert(databases3.toSeq == Seq("default"))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index 93e50f4ee9..343d7bae98 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -472,6 +472,7 @@ class HiveDDLSuite
sql(s"DROP TABLE $tabName")
assert(tmpDir.listFiles.isEmpty)
+ sql("USE default")
sql(s"DROP DATABASE $dbName")
assert(!fs.exists(new Path(tmpDir.toString)))
}
@@ -526,6 +527,7 @@ class HiveDDLSuite
assert(!tableDirectoryExists(TableIdentifier(tabName), Option(expectedDBLocation)))
}
+ sql(s"USE default")
val sqlDropDatabase = s"DROP DATABASE $dbName ${if (cascade) "CASCADE" else "RESTRICT"}"
if (tableExists && !cascade) {
val message = intercept[AnalysisException] {