aboutsummaryrefslogtreecommitdiff
path: root/repl/src/test/scala/org
diff options
context:
space:
mode:
authorSean Owen <sowen@cloudera.com>2014-05-12 14:16:19 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-05-12 14:16:19 -0700
commit7120a2979d0a9f0f54a88b2416be7ca10e74f409 (patch)
treed3db2f178f003fc79cee2ec3fe60508e56f29f8d /repl/src/test/scala/org
parent1e4a65e69489ff877e6da6f78b1c1306335e373c (diff)
downloadspark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.tar.gz
spark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.tar.bz2
spark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.zip
SPARK-1798. Tests should clean up temp files
Three issues related to temp files that tests generate – these should be touched up for hygiene but are not urgent. Modules have a log4j.properties which directs the unit-test.log output file to a directory like `[module]/target/unit-test.log`. But this ends up creating `[module]/[module]/target/unit-test.log` instead of the former. The `work/` directory is not deleted by "mvn clean", in the parent and in modules. Neither is the `checkpoint/` directory created under the various external modules. Many tests create a temp directory, which is not usually deleted. This can be largely resolved by calling `deleteOnExit()` at creation and trying to call `Utils.deleteRecursively` consistently to clean up, sometimes in an `@After` method. _If anyone seconds the motion, I can create a more significant change that introduces a new test trait along the lines of `LocalSparkContext`, which provides management of temp directories for subclasses to take advantage of._ Author: Sean Owen <sowen@cloudera.com> Closes #732 from srowen/SPARK-1798 and squashes the following commits: 5af578e [Sean Owen] Try to consistently delete test temp dirs and files, and set deleteOnExit() for each b21b356 [Sean Owen] Remove work/ and checkpoint/ dirs with mvn clean bdd0f41 [Sean Owen] Remove duplicate module dir in log4j.properties output path for tests
Diffstat (limited to 'repl/src/test/scala/org')
-rw-r--r--repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala24
-rw-r--r--repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala3
2 files changed, 22 insertions, 5 deletions
diff --git a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
index 336df988a1..c0af7ceb6d 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ExecutorClassLoaderSuite.scala
@@ -18,7 +18,7 @@
package org.apache.spark.repl
import java.io.File
-import java.net.URLClassLoader
+import java.net.{URL, URLClassLoader}
import org.scalatest.BeforeAndAfterAll
import org.scalatest.FunSuite
@@ -26,21 +26,35 @@ import org.scalatest.FunSuite
import com.google.common.io.Files
import org.apache.spark.TestUtils
+import org.apache.spark.util.Utils
class ExecutorClassLoaderSuite extends FunSuite with BeforeAndAfterAll {
val childClassNames = List("ReplFakeClass1", "ReplFakeClass2")
val parentClassNames = List("ReplFakeClass1", "ReplFakeClass2", "ReplFakeClass3")
- val tempDir1 = Files.createTempDir()
- val tempDir2 = Files.createTempDir()
- val url1 = "file://" + tempDir1
- val urls2 = List(tempDir2.toURI.toURL).toArray
+ var tempDir1: File = _
+ var tempDir2: File = _
+ var url1: String = _
+ var urls2: Array[URL] = _
override def beforeAll() {
+ super.beforeAll()
+ tempDir1 = Files.createTempDir()
+ tempDir1.deleteOnExit()
+ tempDir2 = Files.createTempDir()
+ tempDir2.deleteOnExit()
+ url1 = "file://" + tempDir1
+ urls2 = List(tempDir2.toURI.toURL).toArray
childClassNames.foreach(TestUtils.createCompiledClass(_, tempDir1, "1"))
parentClassNames.foreach(TestUtils.createCompiledClass(_, tempDir2, "2"))
}
+ override def afterAll() {
+ super.afterAll()
+ Utils.deleteRecursively(tempDir1)
+ Utils.deleteRecursively(tempDir2)
+ }
+
test("child first") {
val parentLoader = new URLClassLoader(urls2, null)
val classLoader = new ExecutorClassLoader(url1, parentLoader, true)
diff --git a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
index 566d96e16e..95460aa205 100644
--- a/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
+++ b/repl/src/test/scala/org/apache/spark/repl/ReplSuite.scala
@@ -26,6 +26,7 @@ import com.google.common.io.Files
import org.scalatest.FunSuite
import org.apache.spark.SparkContext
import org.apache.commons.lang3.StringEscapeUtils
+import org.apache.spark.util.Utils
class ReplSuite extends FunSuite {
@@ -178,6 +179,7 @@ class ReplSuite extends FunSuite {
test("interacting with files") {
val tempDir = Files.createTempDir()
+ tempDir.deleteOnExit()
val out = new FileWriter(tempDir + "/input")
out.write("Hello world!\n")
out.write("What's up?\n")
@@ -196,6 +198,7 @@ class ReplSuite extends FunSuite {
assertContains("res0: Long = 3", output)
assertContains("res1: Long = 3", output)
assertContains("res2: Long = 3", output)
+ Utils.deleteRecursively(tempDir)
}
test("local-cluster mode") {