author     Sean Owen <sowen@cloudera.com>        2014-05-12 14:16:19 -0700
committer  Patrick Wendell <pwendell@gmail.com>  2014-05-12 14:16:19 -0700
commit     7120a2979d0a9f0f54a88b2416be7ca10e74f409 (patch)
tree       d3db2f178f003fc79cee2ec3fe60508e56f29f8d /sql/core
parent     1e4a65e69489ff877e6da6f78b1c1306335e373c (diff)
SPARK-1798. Tests should clean up temp files
Three issues related to temp files that tests generate – these should be touched up for hygiene but are not urgent.

Modules have a log4j.properties which directs unit-test.log output to a path like `[module]/target/unit-test.log`. But this ends up creating `[module]/[module]/target/unit-test.log` instead of the former.

The `work/` directory is not deleted by `mvn clean`, either in the parent or in modules. Neither is the `checkpoint/` directory created under the various external modules.

Many tests create a temp directory, which is not usually deleted. This can be largely resolved by calling `deleteOnExit()` at creation and trying to call `Utils.deleteRecursively` consistently to clean up, sometimes in an `@After` method.

_If anyone seconds the motion, I can create a more significant change that introduces a new test trait along the lines of `LocalSparkContext`, which provides management of temp directories for subclasses to take advantage of._

Author: Sean Owen <sowen@cloudera.com>

Closes #732 from srowen/SPARK-1798 and squashes the following commits:

5af578e [Sean Owen] Try to consistently delete test temp dirs and files, and set deleteOnExit() for each
b21b356 [Sean Owen] Remove work/ and checkpoint/ dirs with mvn clean
bdd0f41 [Sean Owen] Remove duplicate module dir in log4j.properties output path for tests
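For illustration only, the trait floated in the last paragraph might look something like the sketch below. The name `TempDirectory`, its members, and its placement are hypothetical, not part of this commit; the sketch assumes ScalaTest's `BeforeAndAfterEach` plus the existing `Utils.createTempDir` and `Utils.deleteRecursively` helpers.

```scala
import java.io.File

import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.util.Utils

// Hypothetical trait: gives each test a fresh temp directory and removes it afterwards.
trait TempDirectory extends BeforeAndAfterEach { self: Suite =>

  private var _tempDir: File = _

  /** A temp directory created before and deleted after each test. */
  protected def tempDir: File = _tempDir

  override protected def beforeEach(): Unit = {
    super.beforeEach()
    _tempDir = Utils.createTempDir()
    // deleteOnExit() only removes an *empty* path at JVM exit; it is a
    // fallback in case afterEach() never runs, not the primary cleanup.
    _tempDir.deleteOnExit()
  }

  override protected def afterEach(): Unit = {
    try {
      Utils.deleteRecursively(_tempDir)
    } finally {
      super.afterEach()
    }
  }
}
```

A suite would then declare something like `class InsertIntoSuite extends QueryTest with TempDirectory` and use `tempDir` instead of managing `File.createTempFile` paths by hand.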
Diffstat (limited to 'sql/core')
-rw-r--r--  sql/core/src/test/scala/org/apache/spark/sql/InsertIntoSuite.scala | 12 ++++++++++++
1 file changed, 12 insertions(+), 0 deletions(-)
diff --git a/sql/core/src/test/scala/org/apache/spark/sql/InsertIntoSuite.scala b/sql/core/src/test/scala/org/apache/spark/sql/InsertIntoSuite.scala
index 73d87963b3..4f0b85f262 100644
--- a/sql/core/src/test/scala/org/apache/spark/sql/InsertIntoSuite.scala
+++ b/sql/core/src/test/scala/org/apache/spark/sql/InsertIntoSuite.scala
@@ -29,6 +29,7 @@ class InsertIntoSuite extends QueryTest {
   test("insertInto() created parquet file") {
     val testFilePath = File.createTempFile("sparkSql", "pqt")
     testFilePath.delete()
+    testFilePath.deleteOnExit()
     val testFile = createParquetFile[TestData](testFilePath.getCanonicalPath)
     testFile.registerAsTable("createAndInsertTest")
 
@@ -76,11 +77,14 @@ class InsertIntoSuite extends QueryTest {
       sql("SELECT * FROM createAndInsertTest"),
       testData.collect().toSeq
     )
+
+    testFilePath.delete()
   }
 
   test("INSERT INTO parquet table") {
     val testFilePath = File.createTempFile("sparkSql", "pqt")
     testFilePath.delete()
+    testFilePath.deleteOnExit()
     val testFile = createParquetFile[TestData](testFilePath.getCanonicalPath)
     testFile.registerAsTable("createAndInsertSQLTest")
 
@@ -126,23 +130,31 @@ class InsertIntoSuite extends QueryTest {
       sql("SELECT * FROM createAndInsertSQLTest"),
       testData.collect().toSeq
     )
+
+    testFilePath.delete()
   }
 
   test("Double create fails when allowExisting = false") {
     val testFilePath = File.createTempFile("sparkSql", "pqt")
     testFilePath.delete()
+    testFilePath.deleteOnExit()
     val testFile = createParquetFile[TestData](testFilePath.getCanonicalPath)
 
     intercept[RuntimeException] {
       createParquetFile[TestData](testFilePath.getCanonicalPath, allowExisting = false)
     }
+
+    testFilePath.delete()
   }
 
   test("Double create does not fail when allowExisting = true") {
     val testFilePath = File.createTempFile("sparkSql", "pqt")
     testFilePath.delete()
+    testFilePath.deleteOnExit()
     val testFile = createParquetFile[TestData](testFilePath.getCanonicalPath)
 
     createParquetFile[TestData](testFilePath.getCanonicalPath, allowExisting = true)
+
+    testFilePath.delete()
   }
 }
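As an aside, the `@After` style the commit message mentions would look roughly like the following sketch; the suite and method names are illustrative, and only `Utils.createTempDir` and `Utils.deleteRecursively` are real Spark helpers.

```scala
import java.io.File

import org.junit.{After, Test}

import org.apache.spark.util.Utils

// Illustrative JUnit-style suite: the temp dir is removed in an @After method,
// so cleanup happens even when an assertion fails mid-test.
class TempFileCleanupExample {
  private val tempDir: File = Utils.createTempDir()

  @Test
  def writesIntoTempDir(): Unit = {
    val out = new File(tempDir, "data.txt")
    assert(out.createNewFile())
  }

  @After
  def tearDown(): Unit = {
    Utils.deleteRecursively(tempDir)
  }
}
```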