aboutsummaryrefslogtreecommitdiff
path: root/python/pyspark/sql
diff options
context:
space:
mode:
authorSandeep Singh <sandeep@techaddict.me>2016-06-13 21:58:52 -0700
committerYin Huai <yhuai@databricks.com>2016-06-13 21:58:52 -0700
commit1842cdd4ee9f30b0a5f579e26ff5194e81e3634c (patch)
treef499dc95ea2f0765dd89b69857a47fcee8fae433 /python/pyspark/sql
parentbaa3e633e18c47b12e79fe3ddc01fc8ec010f096 (diff)
downloadspark-1842cdd4ee9f30b0a5f579e26ff5194e81e3634c.tar.gz
spark-1842cdd4ee9f30b0a5f579e26ff5194e81e3634c.tar.bz2
spark-1842cdd4ee9f30b0a5f579e26ff5194e81e3634c.zip
[SPARK-15663][SQL] SparkSession.catalog.listFunctions shouldn't include the list of built-in functions
## What changes were proposed in this pull request?

SparkSession.catalog.listFunctions currently returns all functions, including the list of built-in functions. This makes the method not as useful because anytime it is run the result set contains over 100 built-in functions.

## How was this patch tested?

CatalogSuite

Author: Sandeep Singh <sandeep@techaddict.me>

Closes #13413 from techaddict/SPARK-15663.
Diffstat (limited to 'python/pyspark/sql')
-rw-r--r--python/pyspark/sql/tests.py12
1 file changed, 1 insertion, 11 deletions
diff --git a/python/pyspark/sql/tests.py b/python/pyspark/sql/tests.py
index 0d9dd5ea2a..e0acde6783 100644
--- a/python/pyspark/sql/tests.py
+++ b/python/pyspark/sql/tests.py
@@ -1481,17 +1481,7 @@ class SQLTests(ReusedPySparkTestCase):
spark.sql("CREATE DATABASE some_db")
functions = dict((f.name, f) for f in spark.catalog.listFunctions())
functionsDefault = dict((f.name, f) for f in spark.catalog.listFunctions("default"))
- self.assertTrue(len(functions) > 200)
- self.assertTrue("+" in functions)
- self.assertTrue("like" in functions)
- self.assertTrue("month" in functions)
- self.assertTrue("to_unix_timestamp" in functions)
- self.assertTrue("current_database" in functions)
- self.assertEquals(functions["+"], Function(
- name="+",
- description=None,
- className="org.apache.spark.sql.catalyst.expressions.Add",
- isTemporary=True))
+ self.assertEquals(len(functions), 0)
self.assertEquals(functions, functionsDefault)
spark.catalog.registerFunction("temp_func", lambda x: str(x))
spark.sql("CREATE FUNCTION func1 AS 'org.apache.spark.data.bricks'")