diff options
author | Davies Liu <davies.liu@gmail.com> | 2015-08-01 00:41:15 -0700 |
---|---|---|
committer | Davies Liu <davies.liu@gmail.com> | 2015-08-01 00:41:15 -0700 |
commit | 60ea7ab4bbfaea29a6cdf4e0e71ddc56afd04de6 (patch) | |
tree | c3a12b90f6c9730c05e4e3b41114277227a609e7 /python/pyspark | |
parent | 1d59a4162bf5142af270ed7f4b3eab42870c87b7 (diff) | |
download | spark-60ea7ab4bbfaea29a6cdf4e0e71ddc56afd04de6.tar.gz spark-60ea7ab4bbfaea29a6cdf4e0e71ddc56afd04de6.tar.bz2 spark-60ea7ab4bbfaea29a6cdf4e0e71ddc56afd04de6.zip |
Revert "[SPARK-8232] [SQL] Add sort_array support"
This reverts commit 67ad4e21fc68336b0ad6f9a363fb5ebb51f592bf.
Diffstat (limited to 'python/pyspark')
-rw-r--r-- | python/pyspark/sql/functions.py | 20 |
1 file changed, 0 insertions, 20 deletions
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py index fb542e6cff..89a2a5ceaa 100644 --- a/python/pyspark/sql/functions.py +++ b/python/pyspark/sql/functions.py @@ -51,7 +51,6 @@ __all__ = [ 'sha1', 'sha2', 'size', - 'sort_array', 'sparkPartitionId', 'struct', 'udf', @@ -571,10 +570,8 @@ def length(col): def format_number(col, d): """Formats the number X to a format like '#,###,###.##', rounded to d decimal places, and returns the result as a string. - :param col: the column name of the numeric value to be formatted :param d: the N decimal places - >>> sqlContext.createDataFrame([(5,)], ['a']).select(format_number('a', 4).alias('v')).collect() [Row(v=u'5.0000')] """ @@ -971,23 +968,6 @@ def soundex(col): return Column(sc._jvm.functions.size(_to_java_column(col))) -@since(1.5) -def sort_array(col, asc=True): - """ - Collection function: sorts the input array for the given column in ascending order. - - :param col: name of column or expression - - >>> df = sqlContext.createDataFrame([([2, 1, 3],),([1],),([],)], ['data']) - >>> df.select(sort_array(df.data).alias('r')).collect() - [Row(r=[1, 2, 3]), Row(r=[1]), Row(r=[])] - >>> df.select(sort_array(df.data, asc=False).alias('r')).collect() - [Row(r=[3, 2, 1]), Row(r=[1]), Row(r=[])] - """ - sc = SparkContext._active_spark_context - return Column(sc._jvm.functions.sort_array(_to_java_column(col), asc)) - - class UserDefinedFunction(object): """ User defined function in Python |