aboutsummaryrefslogtreecommitdiff
path: root/core/src
diff options
context:
space:
mode:
authorImran Rashid <irashid@cloudera.com>2016-06-12 12:54:57 +0100
committerSean Owen <sowen@cloudera.com>2016-06-12 12:54:57 +0100
commit8cc22b0085475a188f229536b4f83988ae889a8e (patch)
tree0ef409f534be159843bd597cf1030fdf04aafcdc /core/src
parent9e204c62c6800e03759e04ef68268105d4b86bf2 (diff)
downloadspark-8cc22b0085475a188f229536b4f83988ae889a8e.tar.gz
spark-8cc22b0085475a188f229536b4f83988ae889a8e.tar.bz2
spark-8cc22b0085475a188f229536b4f83988ae889a8e.zip
[SPARK-15878][CORE][TEST] fix cleanup in EventLoggingListenerSuite and ReplayListenerSuite
## What changes were proposed in this pull request?

These tests weren't properly using `LocalSparkContext`, so they weren't cleaning up correctly when tests failed.

## How was this patch tested?

Jenkins.

Author: Imran Rashid <irashid@cloudera.com>

Closes #13602 from squito/SPARK-15878_cleanup_replaylistener.
Diffstat (limited to 'core/src')
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala2
-rw-r--r--core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala6
2 files changed, 4 insertions, 4 deletions
diff --git a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
index 176d8930aa..c4c80b5b57 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/EventLoggingListenerSuite.scala
@@ -181,7 +181,7 @@ class EventLoggingListenerSuite extends SparkFunSuite with LocalSparkContext wit
// into SPARK-6688.
val conf = getLoggingConf(testDirPath, compressionCodec)
.set("spark.hadoop.fs.defaultFS", "unsupported://example.com")
- val sc = new SparkContext("local-cluster[2,2,1024]", "test", conf)
+ sc = new SparkContext("local-cluster[2,2,1024]", "test", conf)
assert(sc.eventLogger.isDefined)
val eventLogger = sc.eventLogger.get
val eventLogPath = eventLogger.logPath
diff --git a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
index 35215c15ea..1732aca941 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/ReplayListenerSuite.scala
@@ -23,7 +23,7 @@ import java.net.URI
import org.json4s.jackson.JsonMethods._
import org.scalatest.BeforeAndAfter
-import org.apache.spark.{SparkConf, SparkContext, SparkFunSuite}
+import org.apache.spark.{LocalSparkContext, SparkConf, SparkContext, SparkFunSuite}
import org.apache.spark.deploy.SparkHadoopUtil
import org.apache.spark.io.CompressionCodec
import org.apache.spark.util.{JsonProtocol, JsonProtocolSuite, Utils}
@@ -31,7 +31,7 @@ import org.apache.spark.util.{JsonProtocol, JsonProtocolSuite, Utils}
/**
* Test whether ReplayListenerBus replays events from logs correctly.
*/
-class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter {
+class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter with LocalSparkContext {
private val fileSystem = Utils.getHadoopFileSystem("/",
SparkHadoopUtil.get.newConfiguration(new SparkConf()))
private var testDir: File = _
@@ -101,7 +101,7 @@ class ReplayListenerSuite extends SparkFunSuite with BeforeAndAfter {
fileSystem.mkdirs(logDirPath)
val conf = EventLoggingListenerSuite.getLoggingConf(logDirPath, codecName)
- val sc = new SparkContext("local-cluster[2,1,1024]", "Test replay", conf)
+ sc = new SparkContext("local-cluster[2,1,1024]", "Test replay", conf)
// Run a few jobs
sc.parallelize(1 to 100, 1).count()