author    Bill Chambers <bill@databricks.com>           2016-06-27 11:50:34 -0700
committer Herman van Hovell <hvanhovell@databricks.com> 2016-06-27 11:50:34 -0700
commit    c48c8ebc0aad433aab7af9e2ddf544d253ab9fd7 (patch)
tree      98483078efa9bac8ddd64a7d43f0573bd6986c96
parent    3e4e868c850e6b6da2c0d005167316e1abdc7460 (diff)
[SPARK-16220][SQL] Revert Change to Bring Back SHOW FUNCTIONS Functionality
## What changes were proposed in this pull request?

- Fix tests regarding the SHOW FUNCTIONS functionality.
- Revert `catalog.listFunctions` and `SHOW FUNCTIONS` to the Spark 1.x behavior.

Cherry-picked changes from this PR: https://github.com/apache/spark/pull/13413/files

## How was this patch tested?

Unit tests.

Author: Bill Chambers <bill@databricks.com>
Author: Bill Chambers <wchambers@ischool.berkeley.edu>

Closes #13916 from anabranch/master.
 python/pyspark/sql/tests.py                                                            | 12 +++++++++++-
 sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala |  3 +--
 sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala                       |  5 +----
 sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala               |  6 ++++--
 sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala        |  5 +++++
 5 files changed, 22 insertions(+), 9 deletions(-)
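This revert restores the Spark 1.x behavior in which built-in functions appear in both `SHOW FUNCTIONS` and `catalog.listFunctions`. A minimal spark-shell sketch of the restored behavior (the exact function count varies by build, which is why the tests below only assert more than 200):

```scala
// `spark` is the ambient SparkSession in spark-shell.
val names = spark.catalog.listFunctions().collect().map(_.name).toSet

// Built-in expressions are listed again alongside user-defined functions.
assert(Set("+", "like", "month", "to_unix_timestamp", "current_database").subsetOf(names))

// SHOW FUNCTIONS goes through the same SessionCatalog path, so it too
// returns the built-ins: well over 200 rows on a 2.0-era build.
assert(spark.sql("SHOW FUNCTIONS").collect().length > 200)
```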
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 3dc4083704..3f564110ed 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -1509,7 +1509,17 @@ class SQLTests(ReusedPySparkTestCase):
spark.sql("CREATE DATABASE some_db")
functions = dict((f.name, f) for f in spark.catalog.listFunctions())
functionsDefault = dict((f.name, f) for f in spark.catalog.listFunctions("default"))
- self.assertEquals(len(functions), 0)
+ self.assertTrue(len(functions) > 200)
+ self.assertTrue("+" in functions)
+ self.assertTrue("like" in functions)
+ self.assertTrue("month" in functions)
+ self.assertTrue("to_unix_timestamp" in functions)
+ self.assertTrue("current_database" in functions)
+ self.assertEquals(functions["+"], Function(
+ name="+",
+ description=None,
+ className="org.apache.spark.sql.catalyst.expressions.Add",
+ isTemporary=True))
self.assertEquals(functions, functionsDefault)
spark.catalog.registerFunction("temp_func", lambda x: str(x))
spark.sql("CREATE FUNCTION func1 AS 'org.apache.spark.data.bricks'")
diff --git a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
index 7ab10d1c38..2880087b58 100644
--- a/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
+++ b/sql/catalyst/src/main/scala/org/apache/spark/sql/catalyst/catalog/SessionCatalog.scala
@@ -855,8 +855,7 @@ class SessionCatalog(
.map { f => FunctionIdentifier(f, Some(dbName)) }
val loadedFunctions = StringUtils.filterPattern(functionRegistry.listFunction(), pattern)
.map { f => FunctionIdentifier(f) }
- (dbFunctions ++ loadedFunctions)
- .filterNot(f => FunctionRegistry.functionSet.contains(f.funcName))
+ dbFunctions ++ loadedFunctions
}
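The deleted `filterNot` was the root cause of the regression: every name in `FunctionRegistry.functionSet` (i.e., every built-in) was stripped from the combined listing, so a fresh session reported no functions at all. A simplified before/after sketch, using plain string sets in place of the real `FunctionIdentifier` plumbing (the values are illustrative, not taken from the registry):

```scala
// Hypothetical stand-ins for the three sources involved.
val builtinSet  = Set("+", "like", "month")       // FunctionRegistry.functionSet
val dbFunctions = Set("func1")                    // persistent functions in the database
val loadedFuncs = builtinSet ++ Set("temp_func")  // functionRegistry.listFunction()

// Before this commit: built-ins were filtered out of their own listing.
val before = (dbFunctions ++ loadedFuncs).filterNot(builtinSet.contains)
// before == Set("func1", "temp_func")

// After this commit: the plain union keeps the built-ins visible.
val after = dbFunctions ++ loadedFuncs
// after == Set("+", "like", "month", "func1", "temp_func")
```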
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
index bbe821b768..59b25cb8d0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/SQLQuerySuite.scala
@@ -80,13 +80,10 @@ class SQLQuerySuite extends QueryTest with SharedSQLContext {
val functions = Array("ilog", "logi", "logii", "logiii", "crc32i", "cubei", "cume_disti",
"isize", "ispace", "to_datei", "date_addi", "current_datei")
- assert(sql("SHOW functions").collect().isEmpty)
-
createFunction(functions)
checkAnswer(sql("SHOW functions"), getFunctions("*"))
- assert(sql("SHOW functions").collect().size === functions.size)
- assert(sql("SHOW functions").collect().toSet === functions.map(Row(_)).toSet)
+ assert(sql("SHOW functions").collect().size > 200)
Seq("^c*", "*e$", "log*", "*date*").foreach { pattern =>
// For the pattern part, only '*' and '|' are allowed as wildcards.
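As the comment above notes, the pattern argument of `SHOW FUNCTIONS` treats only `'*'` (any character sequence) and `'|'` (alternation) as wildcards; other regex metacharacters such as `^` and `$` match literally. A short usage sketch, assuming 2.0-era SQL syntax:

```scala
// List every function whose name starts with "log": log, log10, log1p, log2, ...
spark.sql("SHOW FUNCTIONS 'log*'").show()

// '|' separates alternatives, matching either name exactly.
spark.sql("SHOW FUNCTIONS 'crc32|cube'").show()
```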
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
index df817f863d..d862e4cfa9 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/internal/CatalogSuite.scala
@@ -174,7 +174,8 @@ class CatalogSuite
}
test("list functions") {
- assert(spark.catalog.listFunctions().collect().isEmpty)
+ assert(Set("+", "current_database", "window").subsetOf(
+ spark.catalog.listFunctions().collect().map(_.name).toSet))
createFunction("my_func1")
createFunction("my_func2")
createTempFunction("my_temp_func")
@@ -191,7 +192,8 @@ class CatalogSuite
}
test("list functions with database") {
- assert(spark.catalog.listFunctions("default").collect().isEmpty)
+ assert(Set("+", "current_database", "window").subsetOf(
+ spark.catalog.listFunctions().collect().map(_.name).toSet))
createDatabase("my_db1")
createDatabase("my_db2")
createFunction("my_func1", Some("my_db1"))
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index 9c1f218253..e8af4fbe87 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -187,6 +187,11 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
}
test("show functions") {
+ val allBuiltinFunctions = FunctionRegistry.builtin.listFunction().toSet[String].toList.sorted
+ val allFunctions = sql("SHOW functions").collect().map(r => r(0))
+ allBuiltinFunctions.foreach { f =>
+ assert(allFunctions.contains(f))
+ }
withTempDatabase { db =>
def createFunction(names: Seq[String]): Unit = {
names.foreach { name =>