author     Xiangrui Meng <meng@databricks.com>    2014-08-19 22:05:29 -0700
committer  Xiangrui Meng <meng@databricks.com>    2014-08-19 22:05:29 -0700
commit     fce5c0fb6384f3a142a4155525a5d62640725150 (patch)
tree       588a1cccbc995bcba1508442ce40b3f7e094dc82 /streaming
parent     068b6fe6a10eb1c6b2102d88832203267f030e85 (diff)
download   spark-fce5c0fb6384f3a142a4155525a5d62640725150.tar.gz
           spark-fce5c0fb6384f3a142a4155525a5d62640725150.tar.bz2
           spark-fce5c0fb6384f3a142a4155525a5d62640725150.zip
[HOTFIX][Streaming][MLlib] use temp folder for checkpoint
or Jenkins will complain about no Apache header in checkpoint files.

tdas rxin

Author: Xiangrui Meng <meng@databricks.com>

Closes #2046 from mengxr/tmp-checkpoint and squashes the following commits:

0d3ec73 [Xiangrui Meng] remove ssc.stop
9797843 [Xiangrui Meng] change checkpointDir to lazy val
89964ab [Xiangrui Meng] use temp folder for checkpoint
Diffstat (limited to 'streaming')
-rw-r--r--  streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala | 17
1 file changed, 11 insertions(+), 6 deletions(-)
diff --git a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
index f095da9cb5..759baacaa4 100644
--- a/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
+++ b/streaming/src/test/scala/org/apache/spark/streaming/TestSuiteBase.scala
@@ -17,18 +17,18 @@
 
 package org.apache.spark.streaming
 
-import org.apache.spark.streaming.dstream.{DStream, InputDStream, ForEachDStream}
-import org.apache.spark.streaming.util.ManualClock
+import java.io.{ObjectInputStream, IOException}
 
 import scala.collection.mutable.ArrayBuffer
 import scala.collection.mutable.SynchronizedBuffer
 import scala.reflect.ClassTag
 
-import java.io.{ObjectInputStream, IOException}
-
 import org.scalatest.{BeforeAndAfter, FunSuite}
+import com.google.common.io.Files
 
-import org.apache.spark.{SparkContext, SparkConf, Logging}
+import org.apache.spark.streaming.dstream.{DStream, InputDStream, ForEachDStream}
+import org.apache.spark.streaming.util.ManualClock
+import org.apache.spark.{SparkConf, Logging}
 import org.apache.spark.rdd.RDD
 
 /**
@@ -119,7 +119,12 @@ trait TestSuiteBase extends FunSuite with BeforeAndAfter with Logging {
   def batchDuration = Seconds(1)
 
   // Directory where the checkpoint data will be saved
-  def checkpointDir = "checkpoint"
+  lazy val checkpointDir = {
+    val dir = Files.createTempDir()
+    logDebug(s"checkpointDir: $dir")
+    dir.deleteOnExit()
+    dir.toString
+  }
 
   // Number of partitions of the input parallel collections created for testing
   def numInputPartitions = 2
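
For reference, a minimal standalone sketch of the pattern this patch adopts: a lazily created temporary checkpoint directory, built with Guava's Files.createTempDir() and exposed as a plain path string. The object name TempCheckpointSketch, the main method, and the println calls are illustrative only and are not part of the patch.

// Sketch of the temp-checkpoint pattern used in TestSuiteBase, assuming
// Guava is on the classpath. Everything except Files.createTempDir() and
// deleteOnExit() is illustrative, not taken verbatim from the patch.
import com.google.common.io.Files

object TempCheckpointSketch {
  // Created on first access, mirroring the lazy val in TestSuiteBase.
  lazy val checkpointDir: String = {
    val dir = Files.createTempDir()   // a fresh directory under java.io.tmpdir
    println(s"checkpointDir: $dir")   // TestSuiteBase logs this via logDebug
    dir.deleteOnExit()                // best-effort cleanup at JVM exit
    dir.toString
  }

  def main(args: Array[String]): Unit = {
    // A streaming test would pass this path to ssc.checkpoint(checkpointDir),
    // so checkpoint files land under the system temp directory instead of a
    // "checkpoint" folder inside the workspace that Jenkins' Apache-header
    // check scans.
    println(checkpointDir)
  }
}

Note that File.deleteOnExit() only removes the directory itself if it is empty at shutdown; the point of the change is simply to keep the generated checkpoint files out of the checked workspace, not to guarantee their deletion.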