author    HuJiayin <jiayin.hu@intel.com>    2015-08-01 21:44:57 -0700
committer Davies Liu <davies.liu@gmail.com> 2015-08-01 21:44:57 -0700
commit    00cd92f32f17ca57d47aa2dcc716eb707aaee799 (patch)
tree      87fae8a2daea19abc7dee69b551c5c0e6f54bf4b /python
parent    5d9e33d9a2633e45082ac395a64646364f22f4c4 (diff)
download  spark-00cd92f32f17ca57d47aa2dcc716eb707aaee799.tar.gz
          spark-00cd92f32f17ca57d47aa2dcc716eb707aaee799.tar.bz2
          spark-00cd92f32f17ca57d47aa2dcc716eb707aaee799.zip
[SPARK-8269] [SQL] string function: initcap
This PR is based on #7208, thanks to HuJiayin. Closes #7208

Author: HuJiayin <jiayin.hu@intel.com>
Author: Davies Liu <davies@databricks.com>

Closes #7850 from davies/initcap and squashes the following commits:

54472e9 [Davies Liu] fix python test
17ffe51 [Davies Liu] Merge branch 'master' of github.com:apache/spark into initcap
ca46390 [Davies Liu] Merge branch 'master' of github.com:apache/spark into initcap
3a906e4 [Davies Liu] implement title case in UTF8String
8b2506a [HuJiayin] Update functions.py
2cd43e5 [HuJiayin] fix python style check
b616c0e [HuJiayin] add python api
1f5a0ef [HuJiayin] add codegen
7e0c604 [HuJiayin] Merge branch 'master' of https://github.com/apache/spark into initcap
6a0b958 [HuJiayin] add column
c79482d [HuJiayin] support soundex
7ce416b [HuJiayin] support initcap rebase code
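For context, a minimal sketch of how the new Python `initcap` wrapper is called once this patch is applied. The SparkContext/SQLContext setup, DataFrame contents, and column names are illustrative assumptions and not part of the commit itself:

```python
# Illustrative usage sketch (not part of the patch): the context setup and
# sample data below are assumptions; only initcap() comes from this commit.
from pyspark import SparkContext
from pyspark.sql import SQLContext
from pyspark.sql.functions import initcap

sc = SparkContext("local", "initcap-demo")
sqlContext = SQLContext(sc)

# initcap upper-cases the first letter of each whitespace-delimited word.
df = sqlContext.createDataFrame([('ab cd',), ('hello spark',)], ['a'])
print(df.select(initcap(df.a).alias('v')).collect())
# [Row(v=u'Ab Cd'), Row(v=u'Hello Spark')]
```

The same function is also exposed on the SQL side by this change (the commit adds codegen and a title-case routine in UTF8String), so an equivalent `initcap(...)` call can be used in SQL expressions as well.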
Diffstat (limited to 'python')
-rw-r--r--  python/pyspark/sql/functions.py | 12
1 file changed, 12 insertions, 0 deletions
diff --git a/python/pyspark/sql/functions.py b/python/pyspark/sql/functions.py
index 96975f54ff..a73ecc7d93 100644
--- a/python/pyspark/sql/functions.py
+++ b/python/pyspark/sql/functions.py
@@ -958,6 +958,18 @@ def substring_index(str, delim, count):
return Column(sc._jvm.functions.substring_index(_to_java_column(str), delim, count))
+@ignore_unicode_prefix
+@since(1.5)
+def initcap(col):
+ """Translate the first letter of each word to upper case in the sentence.
+
+ >>> sqlContext.createDataFrame([('ab cd',)], ['a']).select(initcap("a").alias('v')).collect()
+ [Row(v=u'Ab Cd')]
+ """
+ sc = SparkContext._active_spark_context
+ return Column(sc._jvm.functions.initcap(_to_java_column(col)))
+
+
@since(1.5)
def size(col):
"""