From c42f5d17878974bc639bbe17724aca60e5648f0c Mon Sep 17 00:00:00 2001
From: Kay Ousterhout
Date: Wed, 23 Oct 2013 17:35:01 -0700
Subject: Fixed broken tests

---
 .../test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

(limited to 'core')

diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index c844dad6b6..a8e2a94ddc 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -30,11 +30,8 @@ class SparkListenerSuite extends FunSuite with LocalSparkContext with ShouldMatc
   /** Length of time to wait while draining listener events. */
   val WAIT_TIMEOUT_MILLIS = 10000
 
-  before {
-    sc = new SparkContext("local", "DAGSchedulerSuite")
-  }
-
   test("basic creation of StageInfo") {
+    sc = new SparkContext("local", "DAGSchedulerSuite")
     val listener = new SaveStageInfo
     sc.addSparkListener(listener)
     val rdd1 = sc.parallelize(1 to 100, 4)
@@ -55,6 +52,7 @@ class SparkListenerSuite extends FunSuite with LocalSparkContext with ShouldMatc
   }
 
   test("StageInfo with fewer tasks than partitions") {
+    sc = new SparkContext("local", "DAGSchedulerSuite")
     val listener = new SaveStageInfo
     sc.addSparkListener(listener)
     val rdd1 = sc.parallelize(1 to 100, 4)
@@ -70,6 +68,7 @@ class SparkListenerSuite extends FunSuite with LocalSparkContext with ShouldMatc
   }
 
   test("local metrics") {
+    sc = new SparkContext("local", "DAGSchedulerSuite")
     val listener = new SaveStageInfo
     sc.addSparkListener(listener)
     sc.addSparkListener(new StatsReportListener)
-- 
cgit v1.2.3