about summary refs log tree commit diff
path: root/sql/core/src/test
diff options
context:
space:
mode:
authorYin Huai <yhuai@databricks.com>2016-07-21 12:10:26 -0700
committerYin Huai <yhuai@databricks.com>2016-07-21 12:10:26 -0700
commit9abd99b3c318d0ec8b91124d40f3ab9e9d835dcf (patch)
tree028cc1cfcc33b9afb091a30c945a97bc33c2883c /sql/core/src/test
parent235cb256d06653bcde4c3ed6b081503a94996321 (diff)
downloadspark-9abd99b3c318d0ec8b91124d40f3ab9e9d835dcf.tar.gz
spark-9abd99b3c318d0ec8b91124d40f3ab9e9d835dcf.tar.bz2
spark-9abd99b3c318d0ec8b91124d40f3ab9e9d835dcf.zip
[SPARK-16656][SQL] Try to make CreateTableAsSelectSuite more stable
## What changes were proposed in this pull request? https://amplab.cs.berkeley.edu/jenkins/job/SparkPullRequestBuilder/62593/testReport/junit/org.apache.spark.sql.sources/CreateTableAsSelectSuite/create_a_table__drop_it_and_create_another_one_with_the_same_name/ shows that `create a table, drop it and create another one with the same name` failed. But other runs were good. Seems it is a flaky test. This PR tries to make this test more stable. Author: Yin Huai <yhuai@databricks.com> Closes #14289 from yhuai/SPARK-16656.
Diffstat (limited to 'sql/core/src/test')
-rw-r--r-- sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala | 25
1 file changed, 15 insertions(+), 10 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
index f9a07dbdf0..251a25665a 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/sources/CreateTableAsSelectSuite.scala
@@ -19,7 +19,7 @@ package org.apache.spark.sql.sources
import java.io.File
-import org.scalatest.BeforeAndAfter
+import org.scalatest.BeforeAndAfterEach
import org.apache.spark.SparkException
import org.apache.spark.sql.catalyst.TableIdentifier
@@ -29,14 +29,16 @@ import org.apache.spark.sql.execution.datasources.BucketSpec
import org.apache.spark.sql.test.SharedSQLContext
import org.apache.spark.util.Utils
-class CreateTableAsSelectSuite extends DataSourceTest with SharedSQLContext with BeforeAndAfter {
+class CreateTableAsSelectSuite
+ extends DataSourceTest
+ with SharedSQLContext
+ with BeforeAndAfterEach {
protected override lazy val sql = spark.sql _
private var path: File = null
override def beforeAll(): Unit = {
super.beforeAll()
- path = Utils.createTempDir()
val rdd = sparkContext.parallelize((1 to 10).map(i => s"""{"a":$i, "b":"str${i}"}"""))
spark.read.json(rdd).createOrReplaceTempView("jt")
}
@@ -44,18 +46,21 @@ class CreateTableAsSelectSuite extends DataSourceTest with SharedSQLContext with
override def afterAll(): Unit = {
try {
spark.catalog.dropTempView("jt")
- if (path.exists()) {
- Utils.deleteRecursively(path)
- }
+ Utils.deleteRecursively(path)
} finally {
super.afterAll()
}
}
- before {
- if (path.exists()) {
- Utils.deleteRecursively(path)
- }
+ override def beforeEach(): Unit = {
+ super.beforeEach()
+ path = Utils.createTempDir()
+ path.delete()
+ }
+
+ override def afterEach(): Unit = {
+ Utils.deleteRecursively(path)
+ super.afterEach()
}
test("CREATE TABLE USING AS SELECT") {