aboutsummaryrefslogtreecommitdiff
diff options
context:
space:
mode:
authorgatorsmile <gatorsmile@gmail.com>2016-05-09 10:49:54 -0700
committerAndrew Or <andrew@databricks.com>2016-05-09 10:49:54 -0700
commitb1e01fd519d4d1bc6d9bd2270f9504d757dbd0d2 (patch)
tree72b25a655fb9b31b1843dfc3e07d619393ae0592
parentbeb16ec556c3b7a23fe0ac7bda66f71abd5c61e9 (diff)
downloadspark-b1e01fd519d4d1bc6d9bd2270f9504d757dbd0d2.tar.gz
spark-b1e01fd519d4d1bc6d9bd2270f9504d757dbd0d2.tar.bz2
spark-b1e01fd519d4d1bc6d9bd2270f9504d757dbd0d2.zip
[SPARK-15199][SQL] Disallow Dropping Built-in Functions
#### What changes were proposed in this pull request? Following the behavior of Hive and the major RDBMSs, built-in functions are not allowed to be dropped. In the current implementation, users can drop built-in functions. However, after dropping a built-in function, users are unable to add it back. #### How was this patch tested? Added a test case. Author: gatorsmile <gatorsmile@gmail.com> Closes #12975 from gatorsmile/dropBuildInFunction.
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala5
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala22
2 files changed, 26 insertions, 1 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
index 79c3648212..a9aa8d797a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.execution.command
import org.apache.spark.sql.{AnalysisException, Row, SparkSession}
import org.apache.spark.sql.catalyst.FunctionIdentifier
-import org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException
+import org.apache.spark.sql.catalyst.analysis.{FunctionRegistry, NoSuchFunctionException}
import org.apache.spark.sql.catalyst.catalog.CatalogFunction
import org.apache.spark.sql.catalyst.expressions.{Attribute, ExpressionInfo}
import org.apache.spark.sql.types.{StringType, StructField, StructType}
@@ -157,6 +157,9 @@ case class DropFunction(
throw new AnalysisException(s"Specifying a database in DROP TEMPORARY FUNCTION " +
s"is not allowed: '${databaseName.get}'")
}
+ if (FunctionRegistry.builtin.functionExists(functionName)) {
+ throw new AnalysisException(s"Cannot drop native function '$functionName'")
+ }
catalog.dropTempFunction(functionName, ifExists)
} else {
// We are dropping a permanent function.
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
index 13074a694e..d1155678e7 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLSuite.scala
@@ -927,6 +927,28 @@ class DDLSuite extends QueryTest with SharedSQLContext with BeforeAndAfterEach {
}
}
+ test("drop build-in function") {
+ Seq("true", "false").foreach { caseSensitive =>
+ withSQLConf(SQLConf.CASE_SENSITIVE.key -> caseSensitive) {
+ // built-in functions cannot be dropped, regardless of case sensitivity
+ var e = intercept[AnalysisException] {
+ sql("DROP TEMPORARY FUNCTION year")
+ }
+ assert(e.getMessage.contains("Cannot drop native function 'year'"))
+
+ e = intercept[AnalysisException] {
+ sql("DROP TEMPORARY FUNCTION YeAr")
+ }
+ assert(e.getMessage.contains("Cannot drop native function 'YeAr'"))
+
+ e = intercept[AnalysisException] {
+ sql("DROP TEMPORARY FUNCTION `YeAr`")
+ }
+ assert(e.getMessage.contains("Cannot drop native function 'YeAr'"))
+ }
+ }
+ }
+
test("describe function") {
checkAnswer(
sql("DESCRIBE FUNCTION log"),