author     Dongjoon Hyun <dongjoon@apache.org>  2016-07-11 15:15:47 +0200
committer  Herman van Hovell <hvanhovell@databricks.com>  2016-07-11 15:15:47 +0200
commit  7ac79da0e4607f7f89a3617edf53c2b174b378e8
tree    45945e49be9c406a8bbfc935f439d14098be9dbc /sql/catalyst
parent  9cb1eb7af779e74165552977002158a7dad9bb09
[SPARK-16459][SQL] Prevent dropping current database
## What changes were proposed in this pull request?

This PR prevents dropping the current database, which otherwise leaves the session pointing at a nonexistent database and causes errors like the following:

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
scala> sql("create table t as select 1")
org.apache.spark.sql.catalyst.analysis.NoSuchDatabaseException: Database `delete_db` not found;
```

## How was this patch tested?

Pass the Jenkins tests, including an updated test case.

Author: Dongjoon Hyun <dongjoon@apache.org>

Closes #14115 from dongjoon-hyun/SPARK-16459.
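With the guard in place, the invalid drop is rejected up front instead of corrupting the session state. A minimal sketch of the expected shell interaction after this patch; the message text comes from the new check in `dropDatabase` below, though the exact rendering of the `AnalysisException` in the REPL is an assumption:

```scala
scala> sql("create database delete_db")
scala> sql("use delete_db")
scala> sql("drop database delete_db")
org.apache.spark.sql.AnalysisException: Can not drop current database `delete_db`;
```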
Diffstat (limited to 'sql/catalyst')
-rw-r--r--  sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala  |  20
1 file changed, 13 insertions(+), 7 deletions(-)
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index ffaefeb09a..d88b5ffc05 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -34,6 +34,10 @@ import org.apache.spark.sql.catalyst.expressions.{Expression, ExpressionInfo}
 import org.apache.spark.sql.catalyst.plans.logical.{LogicalPlan, SubqueryAlias}
 import org.apache.spark.sql.catalyst.util.StringUtils
 
+object SessionCatalog {
+  val DEFAULT_DATABASE = "default"
+}
+
 /**
  * An internal catalog that is used by a Spark Session. This internal catalog serves as a
  * proxy to the underlying metastore (e.g. Hive Metastore) and it also manages temporary
@@ -47,6 +51,7 @@ class SessionCatalog(
     functionRegistry: FunctionRegistry,
     conf: CatalystConf,
     hadoopConf: Configuration) extends Logging {
+  import SessionCatalog._
   import CatalogTypes.TablePartitionSpec
 
   // For testing only.
@@ -77,7 +82,7 @@ class SessionCatalog(
   // the corresponding item in the current database.
   @GuardedBy("this")
   protected var currentDb = {
-    val defaultName = "default"
+    val defaultName = DEFAULT_DATABASE
     val defaultDbDefinition =
       CatalogDatabase(defaultName, "default database", conf.warehousePath, Map())
     // Initialize default database if it doesn't already exist
@@ -146,8 +151,10 @@ class SessionCatalog(
   def dropDatabase(db: String, ignoreIfNotExists: Boolean, cascade: Boolean): Unit = {
     val dbName = formatDatabaseName(db)
-    if (dbName == "default") {
+    if (dbName == DEFAULT_DATABASE) {
       throw new AnalysisException(s"Can not drop default database")
+    } else if (dbName == getCurrentDatabase) {
+      throw new AnalysisException(s"Can not drop current database `${dbName}`")
     }
     externalCatalog.dropDatabase(dbName, ignoreIfNotExists, cascade)
   }
@@ -878,14 +885,14 @@ class SessionCatalog(
    * This is mainly used for tests.
    */
   private[sql] def reset(): Unit = synchronized {
-    val default = "default"
-    listDatabases().filter(_ != default).foreach { db =>
+    setCurrentDatabase(DEFAULT_DATABASE)
+    listDatabases().filter(_ != DEFAULT_DATABASE).foreach { db =>
       dropDatabase(db, ignoreIfNotExists = false, cascade = true)
     }
-    listTables(default).foreach { table =>
+    listTables(DEFAULT_DATABASE).foreach { table =>
       dropTable(table, ignoreIfNotExists = false)
     }
-    listFunctions(default).map(_._1).foreach { func =>
+    listFunctions(DEFAULT_DATABASE).map(_._1).foreach { func =>
       if (func.database.isDefined) {
         dropFunction(func, ignoreIfNotExists = false)
       } else {
@@ -902,7 +909,6 @@ class SessionCatalog(
       require(functionBuilder.isDefined, s"built-in function '$f' is missing function builder")
       functionRegistry.registerFunction(f, expressionInfo.get, functionBuilder.get)
     }
-    setCurrentDatabase(default)
   }
 }
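The commit message mentions an updated test case but the diff above is limited to `sql/catalyst`'s `SessionCatalog.scala`. A minimal sketch of what such a check could look like inside a ScalaTest suite, assuming a `SessionCatalog` wired to an `InMemoryCatalog` as in Spark's `SessionCatalogSuite`; the setup and database name here are illustrative, not the actual test:

```scala
// Illustrative sketch only: assumes the test-only SessionCatalog
// constructor that takes just an ExternalCatalog, and ScalaTest's
// `intercept` (available inside a suite mixing in Assertions).
import org.apache.spark.sql.AnalysisException
import org.apache.spark.sql.catalyst.catalog.{CatalogDatabase, InMemoryCatalog, SessionCatalog}

val catalog = new SessionCatalog(new InMemoryCatalog)
catalog.createDatabase(
  CatalogDatabase("delete_db", "", "file:/tmp/delete_db", Map.empty),
  ignoreIfExists = false)
catalog.setCurrentDatabase("delete_db")

// Dropping the current database must now fail fast.
intercept[AnalysisException] {
  catalog.dropDatabase("delete_db", ignoreIfNotExists = false, cascade = true)
}

// Dropping the default database is still rejected, now via the shared constant.
intercept[AnalysisException] {
  catalog.dropDatabase(
    SessionCatalog.DEFAULT_DATABASE, ignoreIfNotExists = false, cascade = true)
}
```

Note the ordering in `reset()`: the patch moves `setCurrentDatabase(DEFAULT_DATABASE)` to the top so that the subsequent `dropDatabase` calls never target the current database, which would otherwise trip the new guard.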