author    Andy Konwinski <andyk@berkeley.edu>    2012-10-05 19:53:54 -0700
committer Andy Konwinski <andyk@berkeley.edu>    2012-10-05 19:53:54 -0700
commit    a242cdd0a65f5a9abf5998db1ced626bcbdb87bc (patch)
tree      8e9d4c392e33d09239f847d159da8323e87d7ffa /core/src/test
parent    d7363a6b8a1ce8620eadc7d417aee7d61d6680b8 (diff)
Factor subclasses of RDD out of RDD.scala into their own classes
in the rdd package.
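
This test churn rides along with the main change: each RDD subclass (the transformation wrappers that used to be defined in RDD.scala) moves into its own file under the spark.rdd package. Below is a minimal sketch of the resulting shape, assuming the 0.6-era API (ClassManifest context bounds, splits/dependencies/compute overrides); names and signatures are approximations for illustration, not the committed code:

    package spark.rdd

    import spark.{OneToOneDependency, RDD, Split}

    // Illustrative: a one-to-one transformation RDD of the kind that
    // previously lived inside RDD.scala, now in its own file in the
    // rdd package.
    class MappedRDD[U: ClassManifest, T: ClassManifest](
        prev: RDD[T],
        f: T => U)
      extends RDD[U](prev.context) {

      // A map is one-to-one per split, so reuse the parent's partitioning.
      override def splits = prev.splits
      override val dependencies = List(new OneToOneDependency(prev))
      override def compute(split: Split) = prev.iterator(split).map(f)
    }

Factoring the subclasses out keeps RDD.scala down to the abstract contract, while each concrete dependencies/compute pairing gets its own file.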
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/spark/AccumulatorSuite.scala | 8
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/spark/AccumulatorSuite.scala
index b920e53534..403e675f37 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/spark/AccumulatorSuite.scala
@@ -18,6 +18,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
       sc.stop()
       sc = null
     }
+    System.clearProperty("spark.master.port")
   }
 
   test ("basic accumulation"){
@@ -91,7 +92,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
     val maxI = 1000
     for (nThreads <- List(1, 10)) {
       // test single & multi-threaded
-      val sc = new SparkContext("local[" + nThreads + "]", "test")
+      sc = new SparkContext("local[" + nThreads + "]", "test")
       val setAcc = sc.accumulableCollection(mutable.HashSet[Int]())
       val bufferAcc = sc.accumulableCollection(mutable.ArrayBuffer[Int]())
       val mapAcc = sc.accumulableCollection(mutable.HashMap[Int,String]())
@@ -110,6 +111,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
         mapAcc.value should contain (i -> i.toString)
       }
       sc.stop()
+      sc = null
     }
   }
 
@@ -117,7 +119,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
     import SetAccum._
     val maxI = 1000
     for (nThreads <- List(1, 10)) { //test single & multi-threaded
-      val sc = new SparkContext("local[" + nThreads + "]", "test")
+      sc = new SparkContext("local[" + nThreads + "]", "test")
       val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
       val groupedInts = (1 to (maxI/20)).map {x => (20 * (x - 1) to 20 * x).toSet}
       val d = sc.parallelize(groupedInts)
@@ -125,6 +127,8 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
         x => acc.localValue ++= x
       }
       acc.value should be ( (0 to maxI).toSet)
+      sc.stop()
+      sc = null
     }
   }
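
Read together, the hunks settle the suite on a single SparkContext lifecycle: each test assigns the shared sc var instead of shadowing it with a local val, stops and nulls the context when it finishes, and the after block clears spark.master.port so a port setting left by one test cannot leak into the next. A condensed sketch of that pattern, assuming the same ScalaTest 1.x mixins the suite declares; the test body is illustrative, not taken from the suite:

    package spark

    import org.scalatest.{BeforeAndAfter, FunSuite}
    import org.scalatest.matchers.ShouldMatchers

    class LifecycleSketchSuite extends FunSuite with ShouldMatchers with BeforeAndAfter {

      var sc: SparkContext = null

      after {
        // Stop and release any context a test left behind...
        if (sc != null) {
          sc.stop()
          sc = null
        }
        // ...and always clear the driver port so the next context binds fresh.
        System.clearProperty("spark.master.port")
      }

      test("contexts created inside a test are cleaned up") {
        sc = new SparkContext("local[2]", "test")  // assign the field, not a local val
        sc.parallelize(1 to 10).count() should be (10L)
        sc.stop()
        sc = null
      }
    }

Assigning the field rather than declaring a fresh val is what lets the after hook see, and clean up, a context when a test fails midway.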