diff options
author | wujian <jan.chou.wu@gmail.com> | 2016-07-08 14:38:05 -0700 |
---|---|---|
committer | Reynold Xin <rxin@databricks.com> | 2016-07-08 14:38:05 -0700 |
commit | f5fef69143b2a83bb8b168b7417e92659af0c72c (patch) | |
tree | 322c0af0ab3388c4d68656e6dd675d41799b04be /sql/core/src/test | |
parent | 142df4834bc33dc7b84b626c6ee3508ab1abe015 (diff) | |
download | spark-f5fef69143b2a83bb8b168b7417e92659af0c72c.tar.gz spark-f5fef69143b2a83bb8b168b7417e92659af0c72c.tar.bz2 spark-f5fef69143b2a83bb8b168b7417e92659af0c72c.zip |
[SPARK-16281][SQL] Implement parse_url SQL function
## What changes were proposed in this pull request?
This PR adds the parse_url SQL function in order to remove the Hive fallback.
A new implementation of #13999
## How was this patch tested?
Pass the existing tests, including new test cases.
Author: wujian <jan.chou.wu@gmail.com>
Closes #14008 from janplus/SPARK-16281.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r-- | sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala | 15 |
1 files changed, 15 insertions, 0 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala index 044ac22328..f509551b1e 100644 --- a/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala +++ b/sql/core/src/test/scala/org/apache/spark/sql/StringFunctionsSuite.scala @@ -228,6 +228,21 @@ class StringFunctionsSuite extends QueryTest with SharedSQLContext { Row("???hi", "hi???", "h", "h")) } + test("string parse_url function") { + val df = Seq[String](("http://userinfo@spark.apache.org/path?query=1#Ref")) + .toDF("url") + + checkAnswer( + df.selectExpr( + "parse_url(url, 'HOST')", "parse_url(url, 'PATH')", + "parse_url(url, 'QUERY')", "parse_url(url, 'REF')", + "parse_url(url, 'PROTOCOL')", "parse_url(url, 'FILE')", + "parse_url(url, 'AUTHORITY')", "parse_url(url, 'USERINFO')", + "parse_url(url, 'QUERY', 'query')"), + Row("spark.apache.org", "/path", "query=1", "Ref", + "http", "/path?query=1", "userinfo@spark.apache.org", "userinfo", "1")) + } + test("string repeat function") { val df = Seq(("hi", 2)).toDF("a", "b") |