diff options
author | Matei Zaharia <matei@eecs.berkeley.edu> | 2010-11-08 08:49:42 -0800 |
---|---|---|
committer | Matei Zaharia <matei@eecs.berkeley.edu> | 2010-11-08 08:49:42 -0800 |
commit | 504f839c65b2e8d174a4925c74889fb215cc4809 (patch) | |
tree | ddd8088d74aaffd3aa8a51b2fa6c4abbd107ab00 /src | |
parent | 9d3f05a990beacadea00c68f9cf7ff82f93b0a44 (diff) | |
download | spark-504f839c65b2e8d174a4925c74889fb215cc4809.tar.gz spark-504f839c65b2e8d174a4925c74889fb215cc4809.tar.bz2 spark-504f839c65b2e8d174a4925c74889fb215cc4809.zip |
Removed unnecessary collectAsMap
Diffstat (limited to 'src')
-rw-r--r-- | src/scala/spark/LocalFileShuffle.scala | 6 |
1 file changed, 2 insertions, 4 deletions
diff --git a/src/scala/spark/LocalFileShuffle.scala b/src/scala/spark/LocalFileShuffle.scala index db6ae322f1..b5b5e7267d 100644 --- a/src/scala/spark/LocalFileShuffle.scala +++ b/src/scala/spark/LocalFileShuffle.scala @@ -7,8 +7,6 @@ import java.util.concurrent.atomic.AtomicLong import scala.collection.mutable.{ArrayBuffer, HashMap} -import spark.SparkContext._ - /** * A simple implementation of shuffle using local files served through HTTP. @@ -32,7 +30,7 @@ class LocalFileShuffle[K, V, C] extends Shuffle[K, V, C] with Logging { val numInputSplits = splitRdd.splits.size // Run a parallel map and collect to write the intermediate data files, - // returning a hash table of inputSplitId -> serverUri pairs + // returning a list of inputSplitId -> serverUri pairs val outputLocs = splitRdd.map((pair: (Int, Iterator[(K, V)])) => { val myIndex = pair._1 val myIterator = pair._2 @@ -55,7 +53,7 @@ class LocalFileShuffle[K, V, C] extends Shuffle[K, V, C] with Logging { out.close() } (myIndex, LocalFileShuffle.serverUri) - }).collectAsMap() + }).collect() // Build a hashmap from server URI to list of splits (to facillitate // fetching all the URIs on a server within a single connection) |