aboutsummaryrefslogtreecommitdiff
path: root/core/src/test/java/org
diff options
context:
space:
mode:
authorSean Owen <sowen@cloudera.com>2014-05-12 14:16:19 -0700
committerPatrick Wendell <pwendell@gmail.com>2014-05-12 14:16:19 -0700
commit7120a2979d0a9f0f54a88b2416be7ca10e74f409 (patch)
treed3db2f178f003fc79cee2ec3fe60508e56f29f8d /core/src/test/java/org
parent1e4a65e69489ff877e6da6f78b1c1306335e373c (diff)
downloadspark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.tar.gz
spark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.tar.bz2
spark-7120a2979d0a9f0f54a88b2416be7ca10e74f409.zip
SPARK-1798. Tests should clean up temp files
Three issues related to temp files that tests generate – these should be touched up for hygiene but are not urgent. Modules have a log4j.properties which directs the unit-test.log output file to a directory like `[module]/target/unit-test.log`. But this ends up creating `[module]/[module]/target/unit-test.log` instead of the former. The `work/` directory is not deleted by "mvn clean", in the parent and in modules. Neither is the `checkpoint/` directory created under the various external modules. Many tests create a temp directory, which is not usually deleted. This can be largely resolved by calling `deleteOnExit()` at creation and trying to call `Utils.deleteRecursively` consistently to clean up, sometimes in an `@After` method. _If anyone seconds the motion, I can create a more significant change that introduces a new test trait along the lines of `LocalSparkContext`, which provides management of temp directories for subclasses to take advantage of._ Author: Sean Owen <sowen@cloudera.com> Closes #732 from srowen/SPARK-1798 and squashes the following commits: 5af578e [Sean Owen] Try to consistently delete test temp dirs and files, and set deleteOnExit() for each b21b356 [Sean Owen] Remove work/ and checkpoint/ dirs with mvn clean bdd0f41 [Sean Owen] Remove duplicate module dir in log4j.properties output path for tests
Diffstat (limited to 'core/src/test/java/org')
-rw-r--r--core/src/test/java/org/apache/spark/JavaAPISuite.java18
1 files changed, 5 insertions, 13 deletions
diff --git a/core/src/test/java/org/apache/spark/JavaAPISuite.java b/core/src/test/java/org/apache/spark/JavaAPISuite.java
index 1912015827..3dd79243ab 100644
--- a/core/src/test/java/org/apache/spark/JavaAPISuite.java
+++ b/core/src/test/java/org/apache/spark/JavaAPISuite.java
@@ -18,7 +18,6 @@
package org.apache.spark;
import java.io.*;
-import java.lang.StringBuilder;
import java.util.*;
import scala.Tuple2;
@@ -49,16 +48,20 @@ import org.apache.spark.partial.BoundedDouble;
import org.apache.spark.partial.PartialResult;
import org.apache.spark.storage.StorageLevel;
import org.apache.spark.util.StatCounter;
+import org.apache.spark.util.Utils;
// The test suite itself is Serializable so that anonymous Function implementations can be
// serialized, as an alternative to converting these anonymous classes to static inner classes;
// see http://stackoverflow.com/questions/758570/.
public class JavaAPISuite implements Serializable {
private transient JavaSparkContext sc;
+ private transient File tempDir;
@Before
public void setUp() {
sc = new JavaSparkContext("local", "JavaAPISuite");
+ tempDir = Files.createTempDir();
+ tempDir.deleteOnExit();
}
@After
@@ -67,6 +70,7 @@ public class JavaAPISuite implements Serializable {
sc = null;
// To avoid Akka rebinding to the same port, since it doesn't unbind immediately on shutdown
System.clearProperty("spark.driver.port");
+ Utils.deleteRecursively(tempDir);
}
static class ReverseIntComparator implements Comparator<Integer>, Serializable {
@@ -611,7 +615,6 @@ public class JavaAPISuite implements Serializable {
@Test
public void textFiles() throws IOException {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
rdd.saveAsTextFile(outputDir);
@@ -630,7 +633,6 @@ public class JavaAPISuite implements Serializable {
byte[] content1 = "spark is easy to use.\n".getBytes("utf-8");
byte[] content2 = "spark is also easy to use.\n".getBytes("utf-8");
- File tempDir = Files.createTempDir();
String tempDirName = tempDir.getAbsolutePath();
DataOutputStream ds = new DataOutputStream(new FileOutputStream(tempDirName + "/part-00000"));
ds.write(content1);
@@ -653,7 +655,6 @@ public class JavaAPISuite implements Serializable {
@Test
public void textFilesCompressed() throws IOException {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
rdd.saveAsTextFile(outputDir, DefaultCodec.class);
@@ -667,7 +668,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void sequenceFile() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -697,7 +697,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void writeWithNewAPIHadoopFile() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -728,7 +727,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void readWithNewAPIHadoopFile() throws IOException {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -758,7 +756,6 @@ public class JavaAPISuite implements Serializable {
@Test
public void objectFilesOfInts() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4));
rdd.saveAsObjectFile(outputDir);
@@ -771,7 +768,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void objectFilesOfComplexTypes() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -788,7 +784,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void hadoopFile() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -818,7 +813,6 @@ public class JavaAPISuite implements Serializable {
@SuppressWarnings("unchecked")
@Test
public void hadoopFileCompressed() {
- File tempDir = Files.createTempDir();
String outputDir = new File(tempDir, "output_compressed").getAbsolutePath();
List<Tuple2<Integer, String>> pairs = Arrays.asList(
new Tuple2<Integer, String>(1, "a"),
@@ -948,7 +942,6 @@ public class JavaAPISuite implements Serializable {
@Test
public void checkpointAndComputation() {
- File tempDir = Files.createTempDir();
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5));
sc.setCheckpointDir(tempDir.getAbsolutePath());
Assert.assertEquals(false, rdd.isCheckpointed());
@@ -960,7 +953,6 @@ public class JavaAPISuite implements Serializable {
@Test
public void checkpointAndRestore() {
- File tempDir = Files.createTempDir();
JavaRDD<Integer> rdd = sc.parallelize(Arrays.asList(1, 2, 3, 4, 5));
sc.setCheckpointDir(tempDir.getAbsolutePath());
Assert.assertEquals(false, rdd.isCheckpointed());