author    gatorsmile <gatorsmile@gmail.com>      2016-05-10 11:57:01 +0800
committer Wenchen Fan <wenchen@databricks.com>   2016-05-10 11:57:01 +0800
commit    f45379173bc3a3e657b6229bec2faeb409b6ad53 (patch)
tree      99bc79f872e4e9c4c42ae214193d76cb7ea90cd1 /sql/hive/src
parent    4b4344a81331e48b0a00032ec8285f33cc09788a (diff)
[SPARK-15187][SQL] Disallow Dropping Default Database
#### What changes were proposed in this pull request?

In the Hive metastore, dropping the default database is not allowed. However, `InMemoryCatalog` allowed it. This PR disallows users from dropping the default database.

#### How was this patch tested?

A test case for this already exists in HiveDDLSuite; this PR adds the same one to DDLSuite.

Author: gatorsmile <gatorsmile@gmail.com>

Closes #12962 from gatorsmile/dropDefaultDB.
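The check itself lands in SessionCatalog under sql/catalyst, which falls outside the sql/hive tree this page is limited to, so it does not appear in the diff below. A minimal, self-contained Scala sketch of the behavior being enforced (the object, method, and exception below are placeholders for illustration, not the patch's code):

import java.util.Locale

object DropDefaultDatabaseSketch {
  // Spark raises AnalysisException("Can not drop default database"); a plain
  // RuntimeException stands in so this sketch compiles without Spark.
  def dropDatabase(db: String): Unit = {
    // The patch routes names through formatDatabaseName before comparing;
    // unconditional lower-casing here is an assumption for illustration.
    val dbName = db.toLowerCase(Locale.ROOT)
    if (dbName == "default") {
      throw new RuntimeException("Can not drop default database")
    }
    // ...otherwise delegate the drop to the underlying catalog...
  }

  def main(args: Array[String]): Unit = {
    // Both spellings are rejected, matching the HiveDDLSuite test below.
    dropDatabase("DeFault")
  }
}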
Diffstat (limited to 'sql/hive/src')
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala        15
-rw-r--r--  sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala    19
2 files changed, 24 insertions, 10 deletions
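The HiveSessionCatalog changes below thread database names through formatDatabaseName before table and function lookups, so that an identifier like DeFault resolves the same way as default. A hedged sketch of that normalization, modeled on how SessionCatalog formats table names (the conditional lower-casing is an assumption, not code from this patch):

import java.util.Locale

// Identifiers are assumed to be lower-cased unless case-sensitive analysis is on.
def formatDatabaseName(name: String, caseSensitiveAnalysis: Boolean): String =
  if (caseSensitiveAnalysis) name else name.toLowerCase(Locale.ROOT)

// e.g. formatDatabaseName("DeFault", caseSensitiveAnalysis = false) == "default"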
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 3220f143aa..75a252ccba 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -62,7 +62,8 @@ private[sql] class HiveSessionCatalog(
   override def lookupRelation(name: TableIdentifier, alias: Option[String]): LogicalPlan = {
     val table = formatTableName(name.table)
     if (name.database.isDefined || !tempTables.contains(table)) {
-      val newName = name.copy(table = table)
+      val database = name.database.map(formatDatabaseName)
+      val newName = name.copy(database = database, table = table)
       metastoreCatalog.lookupRelation(newName, alias)
     } else {
       val relation = tempTables(table)
@@ -181,10 +182,12 @@ private[sql] class HiveSessionCatalog(
     //   // This function is a Hive builtin function.
     //   ...
     // }
-    Try(super.lookupFunction(name, children)) match {
+    val database = name.database.map(formatDatabaseName)
+    val funcName = name.copy(database = database)
+    Try(super.lookupFunction(funcName, children)) match {
       case Success(expr) => expr
       case Failure(error) =>
-        if (functionRegistry.functionExists(name.unquotedString)) {
+        if (functionRegistry.functionExists(funcName.unquotedString)) {
           // If the function actually exists in functionRegistry, it means that there is an
           // error when we create the Expression using the given children.
           // We need to throw the original exception.
@@ -193,7 +196,7 @@ private[sql] class HiveSessionCatalog(
           // This function is not in functionRegistry, let's try to load it as a Hive's
           // built-in function.
           // Hive is case insensitive.
-          val functionName = name.unquotedString.toLowerCase
+          val functionName = funcName.unquotedString.toLowerCase
           // TODO: This may not really work for current_user because current_user is not evaluated
           // with session info.
           // We do not need to use executionHive at here because we only load
@@ -201,12 +204,12 @@ private[sql] class HiveSessionCatalog(
           val functionInfo = {
             try {
               Option(HiveFunctionRegistry.getFunctionInfo(functionName)).getOrElse(
-                failFunctionLookup(name.unquotedString))
+                failFunctionLookup(funcName.unquotedString))
             } catch {
               // If HiveFunctionRegistry.getFunctionInfo throws an exception,
               // we are failing to load a Hive builtin function, which means that
               // the given function is not a Hive builtin function.
-              case NonFatal(e) => failFunctionLookup(name.unquotedString)
+              case NonFatal(e) => failFunctionLookup(funcName.unquotedString)
             }
           }
           val className = functionInfo.getFunctionClass.getName
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
index aa5b5e6309..a8ba952b49 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveDDLSuite.scala
@@ -498,10 +498,21 @@ class HiveDDLSuite
   }
 
   test("drop default database") {
-    val message = intercept[AnalysisException] {
-      sql("DROP DATABASE default")
-    }.getMessage
-    assert(message.contains("Can not drop default database"))
+    Seq("true", "false").foreach { caseSensitive =>
+      withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
+        var message = intercept[AnalysisException] {
+          sql("DROP DATABASE default")
+        }.getMessage
+        assert(message.contains("Can not drop default database"))
+
+        // SQLConf.CASE_SENSITIVE does not affect the result
+        // because the Hive metastore is not case sensitive.
+        message = intercept[AnalysisException] {
+          sql("DROP DATABASE DeFault")
+        }.getMessage
+        assert(message.contains("Can not drop default database"))
+      }
+    }
   }
 
   test("desc table for data source table") {