diff options
author | tedyu <yuzhihong@gmail.com> | 2015-11-04 10:51:40 +0000 |
---|---|---|
committer | Sean Owen <sowen@cloudera.com> | 2015-11-04 10:51:40 +0000 |
commit | c09e5139874fb3626e005c8240cca5308b902ef3 (patch) | |
tree | b98a863bc4e6472d5ac053806ff7a8762691c8dd | |
parent | 8aff36e91de0fee2f3f56c6d240bb203b5bb48ba (diff) | |
download | spark-c09e5139874fb3626e005c8240cca5308b902ef3.tar.gz spark-c09e5139874fb3626e005c8240cca5308b902ef3.tar.bz2 spark-c09e5139874fb3626e005c8240cca5308b902ef3.zip |
[SPARK-11442] Reduce numSlices for local metrics test of SparkListenerSuite
In the thread, http://search-hadoop.com/m/q3RTtcQiFSlTxeP/test+failed+due+to+OOME&subj=test+failed+due+to+OOME, it was discussed that memory consumption for SparkListenerSuite should be brought down.
This is an attempt in that direction by reducing numSlices for local metrics test.
Author: tedyu <yuzhihong@gmail.com>
Closes #9384 from tedyu/master.
-rw-r--r-- | core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala | 9 |
1 file changed, 5 insertions, 4 deletions
diff --git a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
index a9652d7e7d..53102b9f1c 100644
--- a/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
+++ b/core/src/test/scala/org/apache/spark/scheduler/SparkListenerSuite.scala
@@ -212,14 +212,15 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
       i
     }
-    val d = sc.parallelize(0 to 1e4.toInt, 64).map(w)
+    val numSlices = 16
+    val d = sc.parallelize(0 to 1e3.toInt, numSlices).map(w)
     d.count()
     sc.listenerBus.waitUntilEmpty(WAIT_TIMEOUT_MILLIS)
     listener.stageInfos.size should be (1)

     val d2 = d.map { i => w(i) -> i * 2 }.setName("shuffle input 1")
     val d3 = d.map { i => w(i) -> (0 to (i % 5)) }.setName("shuffle input 2")
-    val d4 = d2.cogroup(d3, 64).map { case (k, (v1, v2)) =>
+    val d4 = d2.cogroup(d3, numSlices).map { case (k, (v1, v2)) =>
       w(k) -> (v1.size, v2.size)
     }
     d4.setName("A Cogroup")
@@ -258,8 +259,8 @@ class SparkListenerSuite extends SparkFunSuite with LocalSparkContext with Match
       if (stageInfo.rddInfos.exists(_.name == d4.name)) {
         taskMetrics.shuffleReadMetrics should be ('defined)
         val sm = taskMetrics.shuffleReadMetrics.get
-        sm.totalBlocksFetched should be (128)
-        sm.localBlocksFetched should be (128)
+        sm.totalBlocksFetched should be (2*numSlices)
+        sm.localBlocksFetched should be (2*numSlices)
         sm.remoteBlocksFetched should be (0)
         sm.remoteBytesRead should be (0L)
       }