commit 9d44ddce1d1e19011026605549c37d0db6d6afa1
Author:    Kay Ousterhout <kayousterhout@gmail.com>  2015-04-08 10:26:45 -0700
Committer: Josh Rosen <joshrosen@databricks.com>     2015-04-08 10:26:45 -0700
tree   3250cc1b7f84056962fa2452bd88e65f4726ecb4
parent f7e21dd1ec4541be54eb01d8b15cfcc6714feed0
[SPARK-6753] Clone SparkConf in ShuffleSuite tests
Prior to this change, the unit test for SPARK-3426 did not clone the
original SparkConf, which meant that that test did not use the options
set by suites that subclass ShuffleSuite.scala. This commit fixes that
problem.

JoshRosen, it would be great if you could take a look at this, since you
wrote this test originally.

Author: Kay Ousterhout <kayousterhout@gmail.com>

Closes #5401 from kayousterhout/SPARK-6753 and squashes the following commits:

368c540 [Kay Ousterhout] [SPARK-6753] Clone SparkConf in ShuffleSuite tests
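To see why the clone matters, here is a minimal, self-contained sketch; the object name, the loadDefaults flag, and the chosen option key are illustrative, not taken from the commit. A fresh SparkConf discards whatever a subclass suite put into the shared conf, while conf.clone() carries those options into the per-test configuration:

    import org.apache.spark.SparkConf

    object CloneDemo {
      def main(args: Array[String]): Unit = {
        // Stand-in for ShuffleSuite's shared conf, holding an option a
        // subclass suite might have set (key chosen for illustration).
        val conf = new SparkConf(loadDefaults = false)
          .set("spark.shuffle.manager", "sort")

        // Before the fix: a brand-new SparkConf knows nothing about the
        // subclass's option.
        val fresh = new SparkConf(loadDefaults = false).setAppName("test")
        assert(!fresh.contains("spark.shuffle.manager"))

        // After the fix: cloning preserves the option while still allowing
        // per-test settings to be layered on top.
        val cloned = conf.clone().setAppName("test")
        assert(cloned.get("spark.shuffle.manager") == "sort")
        println("clone preserves subclass options")
      }
    }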
Diffstat (limited to 'core/src/test')
 core/src/test/scala/org/apache/spark/ShuffleSuite.scala | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index f57921b768..30b6184c77 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -242,14 +242,14 @@ abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContext
       shuffleSpillCompress <- Set(true, false);
       shuffleCompress <- Set(true, false)
     ) {
-      val conf = new SparkConf()
+      val myConf = conf.clone()
         .setAppName("test")
         .setMaster("local")
         .set("spark.shuffle.spill.compress", shuffleSpillCompress.toString)
         .set("spark.shuffle.compress", shuffleCompress.toString)
         .set("spark.shuffle.memoryFraction", "0.001")
       resetSparkContext()
-      sc = new SparkContext(conf)
+      sc = new SparkContext(myConf)
       try {
         sc.parallelize(0 until 100000).map(i => (i / 4, i)).groupByKey().collect()
       } catch {
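For reference, this is roughly how a subclass suite injects its options into the shared conf field that the fixed test now clones. The shape is modeled on Spark's SortShuffleSuite, but treat it as an assumed sketch rather than the exact repository code:

    import org.scalatest.BeforeAndAfterAll

    // Sketch of a ShuffleSuite subclass (assumed shape). It mutates the
    // shared `conf` field, and those options only reach the SPARK-3426
    // test once that test clones `conf` instead of building a fresh
    // SparkConf.
    class SortShuffleSuite extends ShuffleSuite with BeforeAndAfterAll {

      // Run every ShuffleSuite test with the sort-based shuffle manager.
      override def beforeAll() {
        conf.set("spark.shuffle.manager", "sort")
      }
    }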