author    Kay Ousterhout <kayousterhout@gmail.com>  2015-04-08 10:26:45 -0700
committer Josh Rosen <joshrosen@databricks.com>     2015-04-08 10:27:07 -0700
commit    3b655680c4f5c71c903af5a71c96447e03350f93 (patch)
tree      88652cbd3d5e3493e66ffe2759277a1e7dbe70f9
parent    e967ecacad8075ef521fbc1a501e074c861d0fe7 (diff)
[SPARK-6753] Clone SparkConf in ShuffleSuite tests
Prior to this change, the unit test for SPARK-3426 did not clone the original
SparkConf, which meant that the test did not use the options set by suites that
subclass ShuffleSuite.scala. This commit fixes that problem.

JoshRosen, it would be great if you could take a look at this, since you wrote
this test originally.

Author: Kay Ousterhout <kayousterhout@gmail.com>

Closes #5401 from kayousterhout/SPARK-6753 and squashes the following commits:

368c540 [Kay Ousterhout] [SPARK-6753] Clone SparkConf in ShuffleSuite tests

(cherry picked from commit 9d44ddce1d1e19011026605549c37d0db6d6afa1)
Signed-off-by: Josh Rosen <joshrosen@databricks.com>
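For context on why the clone matters: ShuffleSuite is abstract, and suites that
subclass it preconfigure the shared suite-level conf before these tests run, so
building a fresh SparkConf silently discarded those settings. A minimal sketch of
the behavior change, in plain Scala against the SparkConf API (the
spark.shuffle.manager key and its values below are illustrative stand-ins, not
taken from the patch):

import org.apache.spark.SparkConf

object CloneConfSketch {
  def main(args: Array[String]): Unit = {
    // Stand-in for the suite-level conf that a hypothetical ShuffleSuite
    // subclass has already customized before the tests run.
    val suiteConf = new SparkConf(loadDefaults = false)
      .set("spark.shuffle.manager", "hash") // illustrative subclass setting

    // Before the fix: a fresh SparkConf knows nothing about the subclass's
    // setting, so the test silently ran with defaults.
    val fresh = new SparkConf(loadDefaults = false)
      .setAppName("test")
      .setMaster("local")
    assert(!fresh.contains("spark.shuffle.manager"))

    // After the fix: cloning copies the subclass's setting onto a private
    // copy that each test can still override without mutating the shared conf.
    val cloned = suiteConf.clone()
      .setAppName("test")
      .setMaster("local")
    assert(cloned.get("spark.shuffle.manager") == "hash")
  }
}

Cloning on each iteration also keeps the per-test overrides (app name, master,
compression flags) off the shared conf, so one combination of settings cannot
leak into the next loop iteration.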
-rw-r--r--  core/src/test/scala/org/apache/spark/ShuffleSuite.scala | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
index f57921b768..30b6184c77 100644
--- a/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/org/apache/spark/ShuffleSuite.scala
@@ -242,14 +242,14 @@ abstract class ShuffleSuite extends FunSuite with Matchers with LocalSparkContex
       shuffleSpillCompress <- Set(true, false);
       shuffleCompress <- Set(true, false)
     ) {
-      val conf = new SparkConf()
+      val myConf = conf.clone()
         .setAppName("test")
         .setMaster("local")
         .set("spark.shuffle.spill.compress", shuffleSpillCompress.toString)
         .set("spark.shuffle.compress", shuffleCompress.toString)
         .set("spark.shuffle.memoryFraction", "0.001")
       resetSparkContext()
-      sc = new SparkContext(conf)
+      sc = new SparkContext(myConf)
       try {
         sc.parallelize(0 until 100000).map(i => (i / 4, i)).groupByKey().collect()
       } catch {