about summary refs log tree commit diff
path: root/core/src
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2012-10-05 22:03:06 -0700
committerMatei Zaharia <matei@eecs.berkeley.edu>2012-10-05 22:03:06 -0700
commit716e10ca32ecb470da086290ac7414360f6e7d0a (patch)
tree7fe35a7815a9fbf1455cea22fdcf5e96ef29a15c /core/src
parent70f02fa91264296bab0b38492891a514907c23c2 (diff)
downloadspark-716e10ca32ecb470da086290ac7414360f6e7d0a.tar.gz
spark-716e10ca32ecb470da086290ac7414360f6e7d0a.tar.bz2
spark-716e10ca32ecb470da086290ac7414360f6e7d0a.zip
Minor formatting fixes
Diffstat (limited to 'core/src')
-rw-r--r-- core/src/main/scala/spark/HadoopWriter.scala | 2
-rw-r--r-- core/src/main/scala/spark/scheduler/ShuffleMapTask.scala | 1
-rw-r--r-- core/src/test/scala/spark/AccumulatorSuite.scala | 2
3 files changed, 2 insertions, 3 deletions
diff --git a/core/src/main/scala/spark/HadoopWriter.scala b/core/src/main/scala/spark/HadoopWriter.scala
index 12b6a0954c..ebb51607e6 100644
--- a/core/src/main/scala/spark/HadoopWriter.scala
+++ b/core/src/main/scala/spark/HadoopWriter.scala
@@ -42,7 +42,7 @@ class HadoopWriter(@transient jobConf: JobConf) extends Logging with Serializabl
setConfParams()
val jCtxt = getJobContext()
- getOutputCommitter().setupJob(jCtxt)
+ getOutputCommitter().setupJob(jCtxt)
}
diff --git a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala b/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
index 966a5e173a..387aac3c1f 100644
--- a/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
+++ b/core/src/main/scala/spark/scheduler/ShuffleMapTask.scala
@@ -147,7 +147,6 @@ private[spark] class ShuffleMapTask(
val blockId = "shuffle_" + dep.shuffleId + "_" + partition + "_" + i
// Get a scala iterator from java map
val iter: Iterator[(Any, Any)] = bucketIterators(i)
- // TODO: This should probably be DISK_ONLY
blockManager.put(blockId, iter, StorageLevel.DISK_ONLY, false)
}
diff --git a/core/src/test/scala/spark/AccumulatorSuite.scala b/core/src/test/scala/spark/AccumulatorSuite.scala
index 403e675f37..b43730468e 100644
--- a/core/src/test/scala/spark/AccumulatorSuite.scala
+++ b/core/src/test/scala/spark/AccumulatorSuite.scala
@@ -119,7 +119,7 @@ class AccumulatorSuite extends FunSuite with ShouldMatchers with BeforeAndAfter
import SetAccum._
val maxI = 1000
for (nThreads <- List(1, 10)) { //test single & multi-threaded
- sc = new SparkContext("local[" + nThreads + "]", "test")
+ sc = new SparkContext("local[" + nThreads + "]", "test")
val acc: Accumulable[mutable.Set[Any], Any] = sc.accumulable(new mutable.HashSet[Any]())
val groupedInts = (1 to (maxI/20)).map {x => (20 * (x - 1) to 20 * x).toSet}
val d = sc.parallelize(groupedInts)