diff options
author | gatorsmile <gatorsmile@gmail.com> | 2016-05-06 11:43:07 -0700 |
---|---|---|
committer | Yin Huai <yhuai@databricks.com> | 2016-05-06 11:43:07 -0700 |
commit | 5c8fad7b9bfd6677111a8e27e2574f82b04ec479 (patch) | |
tree | 5beee2fc0016634be8461e12d1617c7edfb7b3f9 /sql/core | |
parent | 76ad04d9a0a7d4dfb762318d9c7be0d7720f4e1a (diff) | |
download | spark-5c8fad7b9bfd6677111a8e27e2574f82b04ec479.tar.gz spark-5c8fad7b9bfd6677111a8e27e2574f82b04ec479.tar.bz2 spark-5c8fad7b9bfd6677111a8e27e2574f82b04ec479.zip |
[SPARK-15108][SQL] Describe Permanent UDTF
#### What changes were proposed in this pull request?
When describing a UDTF, the command returns a wrong result: it is unable to find the function, which has been created and cataloged in the catalog but not registered in the functionRegistry.
This PR corrects it. If the function is not in the functionRegistry, we check the catalog to collect the information about the UDTF function.
#### How was this patch tested?
Added test cases to verify the results.
Author: gatorsmile <gatorsmile@gmail.com>
Closes #12885 from gatorsmile/showFunction.
Diffstat (limited to 'sql/core')
-rw-r--r-- | sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala | 20 | ||||
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala | 2 |
2 files changed, 12 insertions, 10 deletions
diff --git a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala index 73c1ef70a2..79c3648212 100644 --- a/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala +++ b/sql/core/src/main/scala/org/apache/spark/sql/execution/command/functions.scala @@ -19,6 +19,7 @@ package org.apache.spark.sql.execution.command import org.apache.spark.sql.{AnalysisException, Row, SparkSession} import org.apache.spark.sql.catalyst.FunctionIdentifier +import org.apache.spark.sql.catalyst.analysis.NoSuchFunctionException import org.apache.spark.sql.catalyst.catalog.CatalogFunction import org.apache.spark.sql.catalyst.expressions.{Attribute, ExpressionInfo} import org.apache.spark.sql.types.{StringType, StructField, StructType} @@ -82,7 +83,7 @@ case class CreateFunction( * }}} */ case class DescribeFunction( - functionName: String, + functionName: FunctionIdentifier, isExtended: Boolean) extends RunnableCommand { override val output: Seq[Attribute] = { @@ -92,7 +93,7 @@ case class DescribeFunction( private def replaceFunctionName(usage: String, functionName: String): String = { if (usage == null) { - "To be added." + "N/A." } else { usage.replaceAll("_FUNC_", functionName) } @@ -100,7 +101,7 @@ case class DescribeFunction( override def run(sparkSession: SparkSession): Seq[Row] = { // Hard code "<>", "!=", "between", and "case" for now as there is no corresponding functions. 
- functionName.toLowerCase match { + functionName.funcName.toLowerCase match { case "<>" => Row(s"Function: $functionName") :: Row(s"Usage: a <> b - Returns TRUE if a is not equal to b") :: Nil @@ -115,12 +116,13 @@ case class DescribeFunction( Row(s"Function: case") :: Row(s"Usage: CASE a WHEN b THEN c [WHEN d THEN e]* [ELSE f] END - " + s"When a = b, returns c; when a = d, return e; else return f") :: Nil - case _ => sparkSession.sessionState.functionRegistry.lookupFunction(functionName) match { - case Some(info) => + case _ => + try { + val info = sparkSession.sessionState.catalog.lookupFunctionInfo(functionName) val result = Row(s"Function: ${info.getName}") :: Row(s"Class: ${info.getClassName}") :: - Row(s"Usage: ${replaceFunctionName(info.getUsage(), info.getName)}") :: Nil + Row(s"Usage: ${replaceFunctionName(info.getUsage, info.getName)}") :: Nil if (isExtended) { result :+ @@ -128,9 +130,9 @@ case class DescribeFunction( } else { result } - - case None => Seq(Row(s"Function: $functionName not found.")) - } + } catch { + case _: NoSuchFunctionException => Seq(Row(s"Function: $functionName not found.")) + } } } } diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala index ec5163b658..1ff288cd19 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala @@ -90,7 +90,7 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext { test("SPARK-14415: All functions should have own descriptions") { for (f <- sqlContext.sessionState.functionRegistry.listFunction()) { if (!Seq("cube", "grouping", "grouping_id", "rollup", "window").contains(f)) { - checkKeywordsNotExist(sql(s"describe function `$f`"), "To be added.") + checkKeywordsNotExist(sql(s"describe function `$f`"), "N/A.") } } } |