author     Andrew Or <andrewor14@gmail.com>    2014-01-03 16:13:40 -0800
committer  Andrew Or <andrewor14@gmail.com>    2014-01-03 16:13:40 -0800
commit     838b0e7d154699291f9915d400c59a3580173d01 (patch)
tree       9079bff5940c70c744894ba63849fa1249a976f7 /core/src/test/scala
parent     df413e996fb7a4d7e05698e21d130387cf771811 (diff)
Refactor using SparkConf
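
The refactor replaces JVM-wide System.setProperty calls in the test setup with a SparkConf instance that is passed directly to the SparkContext constructor. A minimal sketch of the pattern applied in the diff below (the configuration keys and values are taken from the change itself; the surrounding scaffolding is illustrative only):

import org.apache.spark.{SparkConf, SparkContext}

// Start from an empty conf (loadDefaults = false) so that spark.* system
// properties set elsewhere in the JVM do not leak into the test.
val conf = new SparkConf(false)
conf.set("spark.shuffle.externalSorting", "true")
conf.set("spark.shuffle.buffer.mb", "1024")
conf.set("spark.shuffle.buffer.fraction", "0.8")

// The settings travel with the context instead of living in global JVM
// state that other suites would have to reset afterwards.
val sc = new SparkContext("local", "test", conf)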
Diffstat (limited to 'core/src/test/scala')
-rw-r--r--  core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala  15
1 file changed, 6 insertions(+), 9 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
index a18d466baa..6c93b1f5a0 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/ExternalAppendOnlyMapSuite.scala
@@ -4,20 +4,17 @@ import scala.collection.mutable.ArrayBuffer
 import org.scalatest.{BeforeAndAfter, FunSuite}
-import org.apache.spark.{HashPartitioner, SparkContext, SparkEnv, LocalSparkContext}
+import org.apache.spark._
 import org.apache.spark.SparkContext.rddToPairRDDFunctions
 class ExternalAppendOnlyMapSuite extends FunSuite with BeforeAndAfter with LocalSparkContext {
   override def beforeEach() {
-    sc = new SparkContext("local", "test")
-    System.setProperty("spark.shuffle.externalSorting", "true")
-  }
-
-  after {
-    System.setProperty("spark.shuffle.externalSorting", "false")
-    System.setProperty("spark.shuffle.buffer.mb", "1024")
-    System.setProperty("spark.shuffle.buffer.fraction", "0.8")
+    val conf = new SparkConf(false)
+    conf.set("spark.shuffle.externalSorting", "true")
+    conf.set("spark.shuffle.buffer.mb", "1024")
+    conf.set("spark.shuffle.buffer.fraction", "0.8")
+    sc = new SparkContext("local", "test", conf)
   }
   val createCombiner: (Int => ArrayBuffer[Int]) = i => ArrayBuffer[Int](i)
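
Why new SparkConf(false) rather than the default constructor: the loadDefaults flag controls whether existing spark.* system properties are copied into the conf. A small sketch of that behavior (assuming the loadDefaults semantics of the SparkConf class this commit adopts; getOption returns the stored value, if any):

import org.apache.spark.SparkConf

// A stray system property left behind by some other suite.
System.setProperty("spark.shuffle.externalSorting", "false")

val withDefaults = new SparkConf(true)   // copies spark.* system properties
val isolated     = new SparkConf(false)  // starts empty, as in beforeEach above

withDefaults.getOption("spark.shuffle.externalSorting")  // Some("false")
isolated.getOption("spark.shuffle.externalSorting")      // None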