aboutsummaryrefslogtreecommitdiff
path: root/sql/hive
diff options
context:
space:
mode:
authorpetermaxlee <petermaxlee@gmail.com>2016-07-13 08:05:20 +0800
committerWenchen Fan <wenchen@databricks.com>2016-07-13 08:05:20 +0800
commit56bd399a86c4e92be412d151200cb5e4a5f6a48a (patch)
treeef355d9f472cc20015240478829e0ab1d2c4f4d1 /sql/hive
parent7f968867ff61c6b1a007874ee7e3a7421d94d373 (diff)
downloadspark-56bd399a86c4e92be412d151200cb5e4a5f6a48a.tar.gz
spark-56bd399a86c4e92be412d151200cb5e4a5f6a48a.tar.bz2
spark-56bd399a86c4e92be412d151200cb5e4a5f6a48a.zip
[SPARK-16284][SQL] Implement reflect SQL function
## What changes were proposed in this pull request?

This patch implements reflect SQL function, which can be used to invoke a Java method in SQL. Slightly different from Hive, this implementation requires the class name and the method name to be literals. This implementation also supports only a smaller number of data types, and requires the function to be static, as suggested by rxin in #13969.

java_method is an alias for reflect, so this should also resolve SPARK-16277.

## How was this patch tested?

Added expression unit tests and an end-to-end test.

Author: petermaxlee <petermaxlee@gmail.com>

Closes #14138 from petermaxlee/reflect-static.
Diffstat (limited to 'sql/hive')
-rw-r--r--sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala7
-rw-r--r--sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala23
2 files changed, 5 insertions, 25 deletions
diff --git a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
index 6f36abc4db..b8a75850b1 100644
--- a/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
+++ b/sql/hive/src/main/scala/org/apache/spark/sql/hive/HiveSessionCatalog.scala
@@ -235,7 +235,10 @@ private[sql] class HiveSessionCatalog(
// parse_url_tuple, posexplode, reflect2,
// str_to_map, windowingtablefunction.
private val hiveFunctions = Seq(
- "hash", "java_method", "histogram_numeric",
- "percentile", "percentile_approx", "reflect", "str_to_map"
+ "hash",
+ "histogram_numeric",
+ "percentile",
+ "percentile_approx",
+ "str_to_map"
)
}
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
index a43f0d0d7e..961d95c268 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/SQLQuerySuite.scala
@@ -996,29 +996,6 @@ class SQLQuerySuite extends QueryTest with SQLTestUtils with TestHiveSingleton {
checkAnswer(sql("SELECT CAST('775983671874188101' as BIGINT)"), Row(775983671874188101L))
}
- // `Math.exp(1.0)` has different result for different jdk version, so not use createQueryTest
- test("udf_java_method") {
- checkAnswer(sql(
- """
- |SELECT java_method("java.lang.String", "valueOf", 1),
- | java_method("java.lang.String", "isEmpty"),
- | java_method("java.lang.Math", "max", 2, 3),
- | java_method("java.lang.Math", "min", 2, 3),
- | java_method("java.lang.Math", "round", 2.5D),
- | java_method("java.lang.Math", "exp", 1.0D),
- | java_method("java.lang.Math", "floor", 1.9D)
- |FROM src tablesample (1 rows)
- """.stripMargin),
- Row(
- "1",
- "true",
- java.lang.Math.max(2, 3).toString,
- java.lang.Math.min(2, 3).toString,
- java.lang.Math.round(2.5).toString,
- java.lang.Math.exp(1.0).toString,
- java.lang.Math.floor(1.9).toString))
- }
-
test("dynamic partition value test") {
try {
sql("set hive.exec.dynamic.partition.mode=nonstrict")