about summary refs log tree commit diff
path: root/sql/core
diff options
context:
space:
mode:
authorReynold Xin <rxin@databricks.com>2016-05-25 19:17:53 +0200
committerHerman van Hovell <hvanhovell@questtec.nl>2016-05-25 19:17:53 +0200
commit4f27b8dd58a66fca7ddd4c239e02b90c34b1cebd (patch)
tree268759d483eeee49a86e52acd0a71728deff429d /sql/core
parent9082b7968ad952e05fc6f4feb499febef6aa45a7 (diff)
downloadspark-4f27b8dd58a66fca7ddd4c239e02b90c34b1cebd.tar.gz
spark-4f27b8dd58a66fca7ddd4c239e02b90c34b1cebd.tar.bz2
spark-4f27b8dd58a66fca7ddd4c239e02b90c34b1cebd.zip
[SPARK-15436][SQL] Remove DescribeFunction and ShowFunctions
## What changes were proposed in this pull request?

This patch removes the last two commands defined in the catalyst module: DescribeFunction and ShowFunctions. They were unnecessary since the parser could just generate DescribeFunctionCommand and ShowFunctionsCommand directly.

## How was this patch tested?

Created a new SparkSqlParserSuite.

Author: Reynold Xin <rxin@databricks.com>

Closes #13292 from rxin/SPARK-15436.
Diffstat (limited to 'sql/core')
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala33
-rw-r--r--sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala6
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala68
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala2
4 files changed, 101 insertions, 8 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
index f85d6062e8..57f534cd9e 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkSqlParser.scala
@@ -24,7 +24,7 @@ import org.antlr.v4.runtime.{ParserRuleContext, Token}
import org.antlr.v4.runtime.tree.TerminalNode
import org.apache.spark.sql.SaveMode
-import org.apache.spark.sql.catalyst.TableIdentifier
+import org.apache.spark.sql.catalyst.{FunctionIdentifier, TableIdentifier}
import org.apache.spark.sql.catalyst.catalog._
import org.apache.spark.sql.catalyst.parser._
import org.apache.spark.sql.catalyst.parser.SqlBaseParser._
@@ -494,6 +494,37 @@ class SparkSqlAstBuilder(conf: SQLConf) extends AstBuilder {
}
/**
+ * Create a plan for a DESCRIBE FUNCTION command.
+ */
+ override def visitDescribeFunction(ctx: DescribeFunctionContext): LogicalPlan = withOrigin(ctx) {
+ import ctx._
+ val functionName =
+ if (describeFuncName.STRING() != null) {
+ FunctionIdentifier(string(describeFuncName.STRING()), database = None)
+ } else if (describeFuncName.qualifiedName() != null) {
+ visitFunctionName(describeFuncName.qualifiedName)
+ } else {
+ FunctionIdentifier(describeFuncName.getText, database = None)
+ }
+ DescribeFunctionCommand(functionName, EXTENDED != null)
+ }
+
+ /**
+ * Create a plan for a SHOW FUNCTIONS command.
+ */
+ override def visitShowFunctions(ctx: ShowFunctionsContext): LogicalPlan = withOrigin(ctx) {
+ import ctx._
+ if (qualifiedName != null) {
+ val name = visitFunctionName(qualifiedName)
+ ShowFunctionsCommand(name.database, Some(name.funcName))
+ } else if (pattern != null) {
+ ShowFunctionsCommand(None, Some(string(pattern)))
+ } else {
+ ShowFunctionsCommand(None, None)
+ }
+ }
+
+ /**
* Create a [[CreateFunctionCommand]] command.
*
* For example:
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
index c46cecc71f..e40525287a 100644
--- a/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
+++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/SparkStrategies.scala
@@ -416,12 +416,6 @@ private[sql] abstract class SparkStrategies extends QueryPlanner[SparkPlan] {
c.child)
ExecutedCommandExec(cmd) :: Nil
- case logical.ShowFunctions(db, pattern) =>
- ExecutedCommandExec(ShowFunctionsCommand(db, pattern)) :: Nil
-
- case logical.DescribeFunction(function, extended) =>
- ExecutedCommandExec(DescribeFunctionCommand(function, extended)) :: Nil
-
case _ => Nil
}
}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
new file mode 100644
index 0000000000..e2858bb475
--- /dev/null
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/SparkSqlParserSuite.scala
@@ -0,0 +1,68 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.spark.sql.execution
+
+import org.apache.spark.sql.catalyst.FunctionIdentifier
+import org.apache.spark.sql.catalyst.parser.ParseException
+import org.apache.spark.sql.catalyst.plans.PlanTest
+import org.apache.spark.sql.catalyst.plans.logical.LogicalPlan
+import org.apache.spark.sql.execution.command.{DescribeFunctionCommand, ShowFunctionsCommand}
+import org.apache.spark.sql.internal.SQLConf
+
+/**
+ * Parser test cases for rules defined in [[SparkSqlParser]].
+ *
+ * See [[org.apache.spark.sql.catalyst.parser.PlanParserSuite]] for rules
+ * defined in the Catalyst module.
+ */
+class SparkSqlParserSuite extends PlanTest {
+
+ private lazy val parser = new SparkSqlParser(new SQLConf)
+
+ private def assertEqual(sqlCommand: String, plan: LogicalPlan): Unit = {
+ comparePlans(parser.parsePlan(sqlCommand), plan)
+ }
+
+ private def intercept(sqlCommand: String, messages: String*): Unit = {
+ val e = intercept[ParseException](parser.parsePlan(sqlCommand))
+ messages.foreach { message =>
+ assert(e.message.contains(message))
+ }
+ }
+
+ test("show functions") {
+ assertEqual("show functions", ShowFunctionsCommand(None, None))
+ assertEqual("show functions foo", ShowFunctionsCommand(None, Some("foo")))
+ assertEqual("show functions foo.bar", ShowFunctionsCommand(Some("foo"), Some("bar")))
+ assertEqual("show functions 'foo\\\\.*'", ShowFunctionsCommand(None, Some("foo\\.*")))
+ intercept("show functions foo.bar.baz", "Unsupported function name")
+ }
+
+ test("describe function") {
+ assertEqual("describe function bar",
+ DescribeFunctionCommand(FunctionIdentifier("bar", database = None), isExtended = false))
+ assertEqual("describe function extended bar",
+ DescribeFunctionCommand(FunctionIdentifier("bar", database = None), isExtended = true))
+ assertEqual("describe function foo.bar",
+ DescribeFunctionCommand(
+ FunctionIdentifier("bar", database = Option("foo")), isExtended = false))
+ assertEqual("describe function extended f.bar",
+ DescribeFunctionCommand(FunctionIdentifier("bar", database = Option("f")), isExtended = true))
+ }
+
+}
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
index eab1f55712..850fca5852 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/execution/command/DDLCommandSuite.scala
@@ -33,7 +33,7 @@ import org.apache.spark.sql.types.{IntegerType, StringType, StructType}
// TODO: merge this with DDLSuite (SPARK-14441)
class DDLCommandSuite extends PlanTest {
- private val parser = new SparkSqlParser(new SQLConf)
+ private lazy val parser = new SparkSqlParser(new SQLConf)
private def assertUnsupported(sql: String, containsThesePhrases: Seq[String] = Seq()): Unit = {
val e = intercept[ParseException] {