Diffstat (limited to 'python/pyspark/sql/functions.py')
-rw-r--r--  python/pyspark/sql/functions.py  28
1 file changed, 28 insertions(+), 0 deletions(-)
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index 12263e6a75..8a470ce19b 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -383,6 +383,34 @@ def randn(seed=None):
 @ignore_unicode_prefix
 @since(1.5)
+def hex(col):
+    """Computes hex value of the given column, which could be StringType,
+    BinaryType, IntegerType or LongType.
+
+    >>> sqlContext.createDataFrame([('ABC', 3)], ['a', 'b']).select(hex('a'), hex('b')).collect()
+    [Row(hex(a)=u'414243', hex(b)=u'3')]
+    """
+    sc = SparkContext._active_spark_context
+    jc = sc._jvm.functions.hex(_to_java_column(col))
+    return Column(jc)
+
+
+@ignore_unicode_prefix
+@since(1.5)
+def unhex(col):
+ """Inverse of hex. Interprets each pair of characters as a hexadecimal number
+ and converts to the byte representation of number.
+
+    >>> sqlContext.createDataFrame([('414243',)], ['a']).select(unhex('a')).collect()
+    [Row(unhex(a)=bytearray(b'ABC'))]
+    """
+    sc = SparkContext._active_spark_context
+    jc = sc._jvm.functions.unhex(_to_java_column(col))
+    return Column(jc)
+
+
+@ignore_unicode_prefix
+@since(1.5)
 def sha1(col):
     """Returns the hex string result of SHA-1.