-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala                 | 38
-rw-r--r--  sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala             | 16
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala | 42
-rw-r--r--  sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala                       |  1
4 files changed, 90 insertions(+), 7 deletions(-)
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
index b9542c7173..c78b9b429c 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkQl.scala
@@ -165,11 +165,11 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
case _ => parseFailed("Invalid CREATE FUNCTION command", node)
}
// If database name is specified, there are 3 tokens, otherwise 2.
- val (funcName, alias) = funcNameArgs match {
+ val (dbName, funcName, alias) = funcNameArgs match {
case Token(dbName, Nil) :: Token(fname, Nil) :: Token(aname, Nil) :: Nil =>
- (unquoteString(dbName) + "." + unquoteString(fname), unquoteString(aname))
+ (Some(unquoteString(dbName)), unquoteString(fname), unquoteString(aname))
case Token(fname, Nil) :: Token(aname, Nil) :: Nil =>
- (unquoteString(fname), unquoteString(aname))
+ (None, unquoteString(fname), unquoteString(aname))
case _ =>
parseFailed("Invalid CREATE FUNCTION command", node)
}
@@ -190,7 +190,37 @@ private[sql] class SparkQl(conf: ParserConf = SimpleParserConf()) extends Cataly
}
case _ => parseFailed("Invalid CREATE FUNCTION command", node)
}
- CreateFunction(funcName, alias, resources, temp.isDefined)(node.source)
+ CreateFunction(dbName, funcName, alias, resources, temp.isDefined)(node.source)
+
+ // DROP [TEMPORARY] FUNCTION [IF EXISTS] function_name;
+ case Token("TOK_DROPFUNCTION", args) =>
+ // Example format:
+ //
+ // TOK_DROPFUNCTION
+ // :- db_name
+ // :- func_name
+ // :- TOK_IFEXISTS
+ // +- TOK_TEMPORARY
+ val (funcNameArgs, otherArgs) = args.partition {
+ case Token("TOK_IFEXISTS", _) => false
+ case Token("TOK_TEMPORARY", _) => false
+ case Token(_, Nil) => true
+ case _ => parseFailed("Invalid DROP FUNCTION command", node)
+ }
+ // If database name is specified, there are 2 tokens, otherwise 1.
+ val (dbName, funcName) = funcNameArgs match {
+ case Token(dbName, Nil) :: Token(fname, Nil) :: Nil =>
+ (Some(unquoteString(dbName)), unquoteString(fname))
+ case Token(fname, Nil) :: Nil =>
+ (None, unquoteString(fname))
+ case _ =>
+ parseFailed("Invalid DROP FUNCTION command", node)
+ }
+
+ val Seq(ifExists, temp) = getClauses(Seq(
+ "TOK_IFEXISTS", "TOK_TEMPORARY"), otherArgs)
+
+ DropFunction(dbName, funcName, ifExists.isDefined, temp.isDefined)(node.source)
case Token("TOK_ALTERTABLE", alterTableArgs) =>
AlterTableCommandParser.parse(node)
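
For readers following the new TOK_DROPFUNCTION branch above, here is a self-contained sketch of the same token handling using toy types. The names Token, DropFunctionCmd and parseDropFunction are illustrative stand-ins, not Spark's ASTNode API; the real branch also reports malformed trees via parseFailed.

    // Toy stand-ins for the parser's AST node type and the resulting command.
    case class Token(name: String, children: Seq[Token] = Nil)
    case class DropFunctionCmd(db: Option[String], func: String, ifExists: Boolean, isTemp: Boolean)

    def parseDropFunction(args: Seq[Token]): DropFunctionCmd = {
      // Split the optional TOK_IFEXISTS / TOK_TEMPORARY clauses from the name tokens.
      val (nameArgs, otherArgs) = args.partition {
        case Token("TOK_IFEXISTS", _) => false
        case Token("TOK_TEMPORARY", _) => false
        case _ => true
      }
      // Two name tokens mean "db.func"; one means just "func".
      val (db, func) = nameArgs match {
        case Seq(Token(d, Nil), Token(f, Nil)) => (Some(d), f)
        case Seq(Token(f, Nil)) => (None, f)
        case _ => sys.error("Invalid DROP FUNCTION command")
      }
      DropFunctionCmd(db, func,
        ifExists = otherArgs.exists(_.name == "TOK_IFEXISTS"),
        isTemp = otherArgs.exists(_.name == "TOK_TEMPORARY"))
    }

    // parseDropFunction(Seq(Token("hello"), Token("world"), Token("TOK_IFEXISTS")))
    //   == DropFunctionCmd(Some("hello"), "world", ifExists = true, isTemp = false)
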
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
index 373b557683..a0f5b75284 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/ddl.scala
@@ -19,7 +19,8 @@ package org.apache.spark.sql.execution.command
import org.apache.spark.internal.Logging
import org.apache.spark.sql.{Row, SQLContext}
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
+import org.apache.spark.sql.catalyst.catalog.CatalogFunction
import org.apache.spark.sql.catalyst.catalog.ExternalCatalog.TablePartitionSpec
import org.apache.spark.sql.catalyst.expressions.{Attribute, AttributeReference}
import org.apache.spark.sql.execution.datasources.BucketSpec
@@ -71,12 +72,25 @@ case class DropDatabase(
extends NativeDDLCommand(sql) with Logging
case class CreateFunction(
+ databaseName: Option[String],
functionName: String,
alias: String,
resources: Seq[(String, String)],
isTemp: Boolean)(sql: String)
extends NativeDDLCommand(sql) with Logging
+/**
+ * The DDL command that drops a function.
+ * ifExists: if true, do not throw an error when the function does not exist.
+ * isTemp: indicates if it is a temporary function.
+ */
+case class DropFunction(
+ databaseName: Option[String],
+ functionName: String,
+ ifExists: Boolean,
+ isTemp: Boolean)(sql: String)
+ extends NativeDDLCommand(sql) with Logging
+
case class AlterTableRename(
oldName: TableIdentifier,
newName: TableIdentifier)(sql: String)
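
Because CreateFunction and DropFunction now carry the database name as a separate Option[String] rather than a pre-joined "db.func" string, downstream code has to recombine the two fields whenever a qualified name is needed. A minimal sketch, using a hypothetical helper that is not part of this patch:

    // Hypothetical helper: rebuild the qualified name from the split fields.
    def qualifiedName(databaseName: Option[String], functionName: String): String =
      databaseName.map(db => s"${db}.${functionName}").getOrElse(functionName)

    // qualifiedName(Some("hello"), "world") == "hello.world"
    // qualifiedName(None, "helloworld")     == "helloworld"
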
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index a33175aa60..18f48ffa94 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -117,12 +117,14 @@ class DDLCommandSuite extends PlanTest {
val parsed1 = parser.parsePlan(sql1)
val parsed2 = parser.parsePlan(sql2)
val expected1 = CreateFunction(
+ None,
"helloworld",
"com.matthewrathbone.example.SimpleUDFExample",
Seq(("jar", "/path/to/jar1"), ("jar", "/path/to/jar2")),
isTemp = true)(sql1)
val expected2 = CreateFunction(
- "hello.world",
+ Some("hello"),
+ "world",
"com.matthewrathbone.example.SimpleUDFExample",
Seq(("archive", "/path/to/archive"), ("file", "/path/to/file")),
isTemp = false)(sql2)
@@ -130,6 +132,44 @@ class DDLCommandSuite extends PlanTest {
comparePlans(parsed2, expected2)
}
+ test("drop function") {
+ val sql1 = "DROP TEMPORARY FUNCTION helloworld"
+ val sql2 = "DROP TEMPORARY FUNCTION IF EXISTS helloworld"
+ val sql3 = "DROP FUNCTION hello.world"
+ val sql4 = "DROP FUNCTION IF EXISTS hello.world"
+
+ val parsed1 = parser.parsePlan(sql1)
+ val parsed2 = parser.parsePlan(sql2)
+ val parsed3 = parser.parsePlan(sql3)
+ val parsed4 = parser.parsePlan(sql4)
+
+ val expected1 = DropFunction(
+ None,
+ "helloworld",
+ ifExists = false,
+ isTemp = true)(sql1)
+ val expected2 = DropFunction(
+ None,
+ "helloworld",
+ ifExists = true,
+ isTemp = true)(sql2)
+ val expected3 = DropFunction(
+ Some("hello"),
+ "world",
+ ifExists = false,
+ isTemp = false)(sql3)
+ val expected4 = DropFunction(
+ Some("hello"),
+ "world",
+ ifExists = true,
+ isTemp = false)(sql4)
+
+ comparePlans(parsed1, expected1)
+ comparePlans(parsed2, expected2)
+ comparePlans(parsed3, expected3)
+ comparePlans(parsed4, expected4)
+ }
+
test("alter table: rename table") {
val sql = "ALTER TABLE table_name RENAME TO new_table_name"
val parsed = parser.parsePlan(sql)
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
index 6586b90377..61fe0985c1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveQl.scala
@@ -102,7 +102,6 @@ private[hive] class HiveQl(conf: ParserConf) extends SparkQl(conf) with Logging
"TOK_DESCDATABASE",
- "TOK_DROPFUNCTION",
"TOK_DROPINDEX",
"TOK_DROPMACRO",
"TOK_DROPROLE",