author    Sean Owen <sowen@cloudera.com>  2014-05-12 14:16:19 -0700
committer Patrick Wendell <pwendell@gmail.com>  2014-05-12 14:16:19 -0700
commit    7120a2979d0a9f0f54a88b2416be7ca10e74f409 (patch)
tree      d3db2f178f003fc79cee2ec3fe60508e56f29f8d /core/src/main
parent    1e4a65e69489ff877e6da6f78b1c1306335e373c (diff)
SPARK-1798. Tests should clean up temp files
Three issues related to temp files that tests generate; these should be touched up for hygiene but are not urgent.

1. Modules have a log4j.properties that directs the unit-test.log output to a path like `[module]/target/unit-test.log`, but this ends up creating `[module]/[module]/target/unit-test.log` instead.
2. The `work/` directory is not deleted by "mvn clean", either in the parent or in the modules. Neither is the `checkpoint/` directory created under the various external modules.
3. Many tests create a temp directory that is not usually deleted. This can be largely resolved by calling `deleteOnExit()` at creation and calling `Utils.deleteRecursively` consistently to clean up, sometimes in an `@After` method.

_If anyone seconds the motion, I can create a more significant change that introduces a new test trait along the lines of `LocalSparkContext`, which provides management of temp directories for subclasses to take advantage of._

Author: Sean Owen <sowen@cloudera.com>

Closes #732 from srowen/SPARK-1798 and squashes the following commits:

5af578e [Sean Owen] Try to consistently delete test temp dirs and files, and set deleteOnExit() for each
b21b356 [Sean Owen] Remove work/ and checkpoint/ dirs with mvn clean
bdd0f41 [Sean Owen] Remove duplicate module dir in log4j.properties output path for tests
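For illustration, a minimal sketch of what such a trait might look like. The trait name `TempDirCleanup` and its members are hypothetical, not part of this patch; it assumes Guava's `Files.createTempDir()` and the `Utils.deleteRecursively` touched by this commit:

```scala
import java.io.File

import com.google.common.io.Files
import org.scalatest.{BeforeAndAfterEach, Suite}

import org.apache.spark.util.Utils

// Hypothetical sketch only: name and members are illustrative, not part of
// this patch. Subclasses call createTempDir() and get cleanup for free.
trait TempDirCleanup extends BeforeAndAfterEach { self: Suite =>

  private var tempDirs = List.empty[File]

  /** Create a temp dir registered for deletion on JVM exit and after each test. */
  protected def createTempDir(): File = {
    val dir = Files.createTempDir()
    dir.deleteOnExit()
    tempDirs ::= dir
    dir
  }

  override def afterEach() {
    try {
      tempDirs.foreach(Utils.deleteRecursively)
      tempDirs = Nil
    } finally {
      super.afterEach()
    }
  }
}
```

A suite would mix this in and call `createTempDir()` instead of creating directories ad hoc, so cleanup runs even when a test fails.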
Diffstat (limited to 'core/src/main')
-rw-r--r--  core/src/main/scala/org/apache/spark/TestUtils.scala   |  1
-rw-r--r--  core/src/main/scala/org/apache/spark/util/Utils.scala  | 18
2 files changed, 11 insertions(+), 8 deletions(-)
diff --git a/core/src/main/scala/org/apache/spark/TestUtils.scala b/core/src/main/scala/org/apache/spark/TestUtils.scala
index 8ae0215482..885c6829a2 100644
--- a/core/src/main/scala/org/apache/spark/TestUtils.scala
+++ b/core/src/main/scala/org/apache/spark/TestUtils.scala
@@ -43,6 +43,7 @@ private[spark] object TestUtils {
    */
   def createJarWithClasses(classNames: Seq[String], value: String = ""): URL = {
     val tempDir = Files.createTempDir()
+    tempDir.deleteOnExit()
     val files = for (name <- classNames) yield createCompiledClass(name, tempDir, value)
     val jarFile = new File(tempDir, "testJar-%s.jar".format(System.currentTimeMillis()))
     createJar(files, jarFile)
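A caveat on the line added above (standard `java.io.File` behavior, not stated in the patch): `deleteOnExit()` removes a directory at JVM exit only if it is empty by then, so it is a best-effort backstop rather than a substitute for recursive deletion. A minimal illustration:

```scala
import java.io.File

import com.google.common.io.Files

object DeleteOnExitCaveat extends App {
  val tempDir: File = Files.createTempDir()
  tempDir.deleteOnExit()                     // backstop: succeeds at JVM exit only if the dir is empty
  val child = new File(tempDir, "data.bin")  // illustrative file a test might create
  child.createNewFile()
  // Without an explicit recursive delete (e.g. Utils.deleteRecursively(tempDir)),
  // the non-empty tempDir survives JVM exit despite deleteOnExit().
}
```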
diff --git a/core/src/main/scala/org/apache/spark/util/Utils.scala b/core/src/main/scala/org/apache/spark/util/Utils.scala
index 8f7594ada2..0631e54237 100644
--- a/core/src/main/scala/org/apache/spark/util/Utils.scala
+++ b/core/src/main/scala/org/apache/spark/util/Utils.scala
@@ -586,15 +586,17 @@ private[spark] object Utils extends Logging {
    * Don't follow directories if they are symlinks.
    */
   def deleteRecursively(file: File) {
-    if ((file.isDirectory) && !isSymlink(file)) {
-      for (child <- listFilesSafely(file)) {
-        deleteRecursively(child)
+    if (file != null) {
+      if ((file.isDirectory) && !isSymlink(file)) {
+        for (child <- listFilesSafely(file)) {
+          deleteRecursively(child)
+        }
       }
-    }
-    if (!file.delete()) {
-      // Delete can also fail if the file simply did not exist
-      if (file.exists()) {
-        throw new IOException("Failed to delete: " + file.getAbsolutePath)
+      if (!file.delete()) {
+        // Delete can also fail if the file simply did not exist
+        if (file.exists()) {
+          throw new IOException("Failed to delete: " + file.getAbsolutePath)
+        }
       }
     }
   }
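The new null guard makes `deleteRecursively` safe to call from teardown code even when the field it cleans up was never assigned, e.g. because test setup failed. A hedged usage sketch (the suite and field names are illustrative, not from this commit):

```scala
import java.io.File

import com.google.common.io.Files
import org.scalatest.{BeforeAndAfter, FunSuite}

import org.apache.spark.util.Utils

// Illustrative suite: if `before` fails early, tempDir stays null, and the
// null-tolerant deleteRecursively keeps the `after` block from throwing.
class ExampleSuite extends FunSuite with BeforeAndAfter {
  var tempDir: File = _

  before {
    tempDir = Files.createTempDir()
    tempDir.deleteOnExit()
  }

  after {
    Utils.deleteRecursively(tempDir) // safe no-op when tempDir is null
  }

  test("writes under the temp dir") {
    val out = new File(tempDir, "out.txt")
    assert(out.createNewFile())
  }
}
```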