about summary refs log tree commit diff
path: root/sql/hive
diff options
context:
space:
mode:
author: Dilip Biswal <dbiswal@us.ibm.com> 2016-01-18 10:28:01 -0800
committer: Yin Huai <yhuai@databricks.com> 2016-01-18 10:28:01 -0800
commit: db9a860589bfc4f80d6cdf174a577ca538b82e6d (patch)
tree: 7dbf96e583b5b2794e12a669b263392b93eea698 /sql/hive
parent: 233d6cee96bb4c1723a5ab36efd19fd6180d651c (diff)
download: spark-db9a860589bfc4f80d6cdf174a577ca538b82e6d.tar.gz
spark-db9a860589bfc4f80d6cdf174a577ca538b82e6d.tar.bz2
spark-db9a860589bfc4f80d6cdf174a577ca538b82e6d.zip
[SPARK-12558][FOLLOW-UP] AnalysisException when multiple functions applied in GROUP BY clause
Addresses the comments from Yin. https://github.com/apache/spark/pull/10520 Author: Dilip Biswal <dbiswal@us.ibm.com> Closes #10758 from dilipbiswal/spark-12558-followup.
Diffstat (limited to 'sql/hive')
-rw-r--r-- sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala | 14
1 file changed, 9 insertions(+), 5 deletions(-)
diff --git a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
index dfe33ba8b0..af76ff91a2 100644
--- a/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
+++ b/sql/hive/src/test/scala/org/apache/spark/sql/hive/execution/HiveUDFSuite.scala
@@ -22,7 +22,7 @@ import java.util.{ArrayList, Arrays, Properties}
import org.apache.hadoop.conf.Configuration
import org.apache.hadoop.hive.ql.udf.UDAFPercentile
-import org.apache.hadoop.hive.ql.udf.generic.{GenericUDAFAverage, GenericUDF, GenericUDFOPAnd, GenericUDTFExplode}
+import org.apache.hadoop.hive.ql.udf.generic._
import org.apache.hadoop.hive.ql.udf.generic.GenericUDF.DeferredObject
import org.apache.hadoop.hive.serde2.{AbstractSerDe, SerDeStats}
import org.apache.hadoop.hive.serde2.objectinspector.{ObjectInspector, ObjectInspectorFactory}
@@ -351,10 +351,14 @@ class HiveUDFSuite extends QueryTest with TestHiveSingleton with SQLTestUtils {
}
test("Hive UDF in group by") {
- Seq(Tuple1(1451400761)).toDF("test_date").registerTempTable("tab1")
- val count = sql("select date(cast(test_date as timestamp))" +
- " from tab1 group by date(cast(test_date as timestamp))").count()
- assert(count == 1)
+ withTempTable("tab1") {
+ Seq(Tuple1(1451400761)).toDF("test_date").registerTempTable("tab1")
+ sql(s"CREATE TEMPORARY FUNCTION testUDFToDate AS '${classOf[GenericUDFToDate].getName}'")
+ val count = sql("select testUDFToDate(cast(test_date as timestamp))" +
+ " from tab1 group by testUDFToDate(cast(test_date as timestamp))").count()
+ sql("DROP TEMPORARY FUNCTION IF EXISTS testUDFToDate")
+ assert(count == 1)
+ }
}
test("SPARK-11522 select input_file_name from non-parquet table"){