author     Bill Chambers <bill@databricks.com>            2016-06-27 11:50:34 -0700
committer  Herman van Hovell <hvanhovell@databricks.com>  2016-06-27 11:50:34 -0700
commit     c48c8ebc0aad433aab7af9e2ddf544d253ab9fd7 (patch)
tree       98483078efa9bac8ddd64a7d43f0573bd6986c96 /python
parent     3e4e868c850e6b6da2c0d005167316e1abdc7460 (diff)
[SPARK-16220][SQL] Revert Change to Bring Back SHOW FUNCTIONS Functionality
## What changes were proposed in this pull request?

- Fix the tests covering the SHOW FUNCTIONS functionality.
- Revert `catalog.listFunctions` and `SHOW FUNCTIONS` to the Spark 1.x behavior.

Cherry-picked changes from this PR: https://github.com/apache/spark/pull/13413/files

## How was this patch tested?

Unit tests.

Author: Bill Chambers <bill@databricks.com>
Author: Bill Chambers <wchambers@ischool.berkeley.edu>

Closes #13916 from anabranch/master.
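For context on the restored behavior: in Spark 1.x, `catalog.listFunctions()` and `SHOW FUNCTIONS` list the built-in functions as well as any user-registered ones, rather than returning an empty result for a fresh session. A minimal sketch of that expected behavior, assuming a local `SparkSession` and the PySpark 2.0-era `registerFunction` API (the session setup, app name, and printed checks are illustrative, not part of this patch):

```python
from pyspark.sql import SparkSession

# Illustrative local session; any active SparkSession would do.
spark = SparkSession.builder.master("local[1]").appName("show-functions-sketch").getOrCreate()

# With the revert, listFunctions() reports the built-in functions
# (several hundred of them), not just user-registered UDFs.
functions = {f.name: f for f in spark.catalog.listFunctions()}
print(len(functions) > 200)              # True
print("+" in functions)                  # built-in arithmetic operator is listed
print("to_unix_timestamp" in functions)  # built-in SQL function is listed

# User-registered functions appear alongside the built-ins.
# registerFunction is the PySpark 2.0-era API, as used by the test below.
spark.catalog.registerFunction("temp_func", lambda x: str(x))
print("temp_func" in {f.name for f in spark.catalog.listFunctions()})

spark.stop()
```

The unit test in the diff below asserts the same properties, both for the current database and for `default`.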
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/sql/tests.py  12
1 file changed, 11 insertions(+), 1 deletion(-)
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 3dc4083704..3f564110ed 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -1509,7 +1509,17 @@ class SQLTests(ReusedPySparkTestCase):
spark.sql("CREATE DATABASE some_db")
functions = dict((f.name, f) for f in spark.catalog.listFunctions())
functionsDefault = dict((f.name, f) for f in spark.catalog.listFunctions("default"))
- self.assertEquals(len(functions), 0)
+ self.assertTrue(len(functions) > 200)
+ self.assertTrue("+" in functions)
+ self.assertTrue("like" in functions)
+ self.assertTrue("month" in functions)
+ self.assertTrue("to_unix_timestamp" in functions)
+ self.assertTrue("current_database" in functions)
+ self.assertEquals(functions["+"], Function(
+ name="+",
+ description=None,
+ className="org.apache.spark.sql.catalyst.expressions.Add",
+ isTemporary=True))
self.assertEquals(functions, functionsDefault)
spark.catalog.registerFunction("temp_func", lambda x: str(x))
spark.sql("CREATE FUNCTION func1 AS 'org.apache.spark.data.bricks'")