about summary refs log tree commit diff
path: root/sql/core
diff options
context:
space:
mode:
authorCheng Lian <lian@databricks.com>2015-05-27 13:09:33 -0700
committerYin Huai <yhuai@databricks.com>2015-05-27 13:09:33 -0700
commitb97ddff000b99adca3dd8fe13d01054fd5014fa0 (patch)
treeb5e0bdbf704f7c1179e059d08fff71070113ad94 /sql/core
parent8161562eabc1eff430cfd9d8eaf413a8c4ef2cfb (diff)
downloadspark-b97ddff000b99adca3dd8fe13d01054fd5014fa0.tar.gz
spark-b97ddff000b99adca3dd8fe13d01054fd5014fa0.tar.bz2
spark-b97ddff000b99adca3dd8fe13d01054fd5014fa0.zip
[SPARK-7684] [SQL] Refactoring MetastoreDataSourcesSuite to workaround SPARK-7684
As stated in SPARK-7684, currently `TestHive.reset` has some execution order specific bug, which makes running specific test suites locally pretty frustrating. This PR refactors `MetastoreDataSourcesSuite` (which relies on `TestHive.reset` heavily) using various `withXxx` utility methods in `SQLTestUtils` to ask each test case to cleanup their own mess so that we can avoid calling `TestHive.reset`. Author: Cheng Lian <lian@databricks.com> Author: Yin Huai <yhuai@databricks.com> Closes #6353 from liancheng/workaround-spark-7684 and squashes the following commits: 26939aa [Yin Huai] Move the initialization of jsonFilePath to beforeAll. a423d48 [Cheng Lian] Fixes Scala style issue dfe45d0 [Cheng Lian] Refactors MetastoreDataSourcesSuite to workaround SPARK-7684 92a116d [Cheng Lian] Fixes minor styling issues
Diffstat (limited to 'sql/core')
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala4
-rw-r--r--sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala12
2 files changed, 12 insertions, 4 deletions
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
index bbf9ab113c..98ba3c9928 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/QueryTest.scala
@@ -67,6 +67,10 @@ class QueryTest extends PlanTest {
checkAnswer(df, Seq(expectedAnswer))
}
+ protected def checkAnswer(df: DataFrame, expectedAnswer: DataFrame): Unit = {
+ checkAnswer(df, expectedAnswer.collect())
+ }
+
def sqlTest(sqlString: String, expectedAnswer: Seq[Row])(implicit sqlContext: SQLContext) {
test(sqlString) {
checkAnswer(sqlContext.sql(sqlString), expectedAnswer)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
index ca66cdc482..17a8b0cca0 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/test/SQLTestUtils.scala
@@ -75,14 +75,18 @@ trait SQLTestUtils {
/**
* Drops temporary tables `tableNames` after calling `f`.
*/
- protected def withTempTable(tableName: String)(f: => Unit): Unit = {
- try f finally sqlContext.dropTempTable(tableName)
+ protected def withTempTable(tableNames: String*)(f: => Unit): Unit = {
+ try f finally tableNames.foreach(sqlContext.dropTempTable)
}
/**
* Drops tables `tableNames` after calling `f`.
*/
- protected def withTable(tableName: String)(f: => Unit): Unit = {
- try f finally sqlContext.sql(s"DROP TABLE IF EXISTS $tableName")
+ protected def withTable(tableNames: String*)(f: => Unit): Unit = {
+ try f finally {
+ tableNames.foreach { name =>
+ sqlContext.sql(s"DROP TABLE IF EXISTS $name")
+ }
+ }
}
}