aboutsummaryrefslogtreecommitdiff
path: root/sql/hive/compatibility/src/test/scala
diff options
context:
space:
mode:
authorYin Huai <yhuai@databricks.com>2015-07-31 14:13:06 -0700
committerYin Huai <yhuai@databricks.com>2015-07-31 14:13:06 -0700
commit3fc0cb92001798167a14c1377362a3335397dd4c (patch)
treeded079d1b2db6d69a706cc33c1948752c792236d /sql/hive/compatibility/src/test/scala
parent710c2b5dd2dc6b8d947303ad8dfae4539b63fe11 (diff)
downloadspark-3fc0cb92001798167a14c1377362a3335397dd4c.tar.gz
spark-3fc0cb92001798167a14c1377362a3335397dd4c.tar.bz2
spark-3fc0cb92001798167a14c1377362a3335397dd4c.zip
[SPARK-9233] [SQL] Enable code-gen in window function unit tests
Since code-gen is enabled by default, it is better to run window function tests with code-gen. https://issues.apache.org/jira/browse/SPARK-9233 Author: Yin Huai <yhuai@databricks.com> Closes #7832 from yhuai/SPARK-9233 and squashes the following commits: 4e4e4cc [Yin Huai] style ca80e07 [Yin Huai] Test window function with codegen.
Diffstat (limited to 'sql/hive/compatibility/src/test/scala')
-rw-r--r--sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala38
1 file changed, 5 insertions, 33 deletions
diff --git a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
index 24a758f531..92bb9e6d73 100644
--- a/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
+++ b/sql/hive/compatibility/src/test/scala/org/apache/spark/sql/hive/execution/HiveWindowFunctionQuerySuite.scala
@@ -32,7 +32,7 @@ import org.apache.spark.util.Utils
* for different tests and there are a few properties needed to let Hive generate golden
* files, every `createQueryTest` calls should explicitly set `reset` to `false`.
*/
-abstract class HiveWindowFunctionQueryBaseSuite extends HiveComparisonTest with BeforeAndAfter {
+class HiveWindowFunctionQuerySuite extends HiveComparisonTest with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault
private val testTempDir = Utils.createTempDir()
@@ -759,21 +759,7 @@ abstract class HiveWindowFunctionQueryBaseSuite extends HiveComparisonTest with
""".stripMargin, reset = false)
}
-class HiveWindowFunctionQueryWithoutCodeGenSuite extends HiveWindowFunctionQueryBaseSuite {
- var originalCodegenEnabled: Boolean = _
- override def beforeAll(): Unit = {
- super.beforeAll()
- originalCodegenEnabled = conf.codegenEnabled
- sql("set spark.sql.codegen=false")
- }
-
- override def afterAll(): Unit = {
- sql(s"set spark.sql.codegen=$originalCodegenEnabled")
- super.afterAll()
- }
-}
-
-abstract class HiveWindowFunctionQueryFileBaseSuite
+class HiveWindowFunctionQueryFileSuite
extends HiveCompatibilitySuite with BeforeAndAfter {
private val originalTimeZone = TimeZone.getDefault
private val originalLocale = Locale.getDefault
@@ -789,11 +775,11 @@ abstract class HiveWindowFunctionQueryFileBaseSuite
// The following settings are used for generating golden files with Hive.
// We have to use kryo to correctly let Hive serialize plans with window functions.
// This is used to generate golden files.
- sql("set hive.plan.serialization.format=kryo")
+ // sql("set hive.plan.serialization.format=kryo")
// Explicitly set fs to local fs.
- sql(s"set fs.default.name=file://$testTempDir/")
+ // sql(s"set fs.default.name=file://$testTempDir/")
// Ask Hive to run jobs in-process as a single map and reduce task.
- sql("set mapred.job.tracker=local")
+ // sql("set mapred.job.tracker=local")
}
override def afterAll() {
@@ -838,17 +824,3 @@ abstract class HiveWindowFunctionQueryFileBaseSuite
case (name, _) => realWhiteList.contains(name)
}
}
-
-class HiveWindowFunctionQueryFileWithoutCodeGenSuite extends HiveWindowFunctionQueryFileBaseSuite {
- var originalCodegenEnabled: Boolean = _
- override def beforeAll(): Unit = {
- super.beforeAll()
- originalCodegenEnabled = conf.codegenEnabled
- sql("set spark.sql.codegen=false")
- }
-
- override def afterAll(): Unit = {
- sql(s"set spark.sql.codegen=$originalCodegenEnabled")
- super.afterAll()
- }
-}