author    Mark Hamstra <markhamstra@gmail.com>  2013-03-05 00:48:13 -0800
committer Mark Hamstra <markhamstra@gmail.com>  2013-03-05 00:48:13 -0800
commit    d046d8ad329b7d5812ecc5f9a4661fab5625b1b7 (patch)
tree      25f235beb8c492a7b3478083b266c5dc0951fa5d /core
parent    9148b968cf34b898c36a7f9672382533ee54db2d (diff)
whitespace formatting
Diffstat (limited to 'core')
-rw-r--r--  core/src/main/scala/spark/RDD.scala       | 20 ++++++++++----------
-rw-r--r--  core/src/test/scala/spark/RDDSuite.scala  | 12 ++++++------
2 files changed, 16 insertions(+), 16 deletions(-)
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index de791598a4..cc206782d0 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -380,7 +380,7 @@ abstract class RDD[T: ClassManifest](
       val factory = factoryBuilder(index, factorySeed)
       iter.map(t => f(factory(t), t))
     }
-      new MapPartitionsWithIndexRDD(this, sc.clean(iterF _), preservesPartitioning)
+    new MapPartitionsWithIndexRDD(this, sc.clean(iterF _), preservesPartitioning)
   }

   /**
@@ -391,14 +391,14 @@ abstract class RDD[T: ClassManifest](
    * and a seed value of type B.
    */
   def flatMapWith[A: ClassManifest, B: ClassManifest, U: ClassManifest](
-    f:(A, T) => Seq[U],
-    factoryBuilder: (Int, B) => (T => A),
-    factorySeed: B,
-    preservesPartitioning: Boolean = false): RDD[U] = {
-      def iterF(index: Int, iter: Iterator[T]): Iterator[U] = {
-        val factory = factoryBuilder(index, factorySeed)
-        iter.flatMap(t => f(factory(t), t))
-      }
+      f:(A, T) => Seq[U],
+      factoryBuilder: (Int, B) => (T => A),
+      factorySeed: B,
+      preservesPartitioning: Boolean = false): RDD[U] = {
+    def iterF(index: Int, iter: Iterator[T]): Iterator[U] = {
+      val factory = factoryBuilder(index, factorySeed)
+      iter.flatMap(t => f(factory(t), t))
+    }
     new MapPartitionsWithIndexRDD(this, sc.clean(iterF _), preservesPartitioning)
   }
@@ -418,7 +418,7 @@ abstract class RDD[T: ClassManifest](
       val factory = factoryBuilder(index, factorySeed)
       iter.filter(t => p(factory(t), t))
     }
-      new MapPartitionsWithIndexRDD(this, sc.clean(iterF _), preservesPartitioning)
+    new MapPartitionsWithIndexRDD(this, sc.clean(iterF _), preservesPartitioning)
   }

   /**
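
For reference, a minimal usage sketch of the flatMapWith signature shown above. This is not part of the patch: sc and the sample data are assumptions, mirroring the test suite below. Each partition builds its own value factory from the partition index plus a shared seed, and f may emit several output values per input element.

    val ones = sc.makeRDD(Array(1, 1, 1, 1, 1, 1), 2)
    val randomPairs = ones.flatMapWith(
      (random: Double, t: Int) => Seq(random * t, random * t),  // two outputs per input
      (index: Int, seed: Int) => {
        val prng = new java.util.Random(index + seed)  // one PRNG per partition
        (_ => prng.nextDouble)},                       // the factory of type T => A
      42).
      collect()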
diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/spark/RDDSuite.scala
index ced8170300..b549677469 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/spark/RDDSuite.scala
@@ -185,8 +185,8 @@ class RDDSuite extends FunSuite with LocalSparkContext {
     val randoms = ones.mapWith(
       (random: Double, t: Int) => random * t,
       (index: Int, seed: Int) => {
-          val prng = new java.util.Random(index + seed)
-          (_ => prng.nextDouble)},
+        val prng = new java.util.Random(index + seed)
+        (_ => prng.nextDouble)},
       42).
       collect()
     val prn42_3 = {
@@ -230,16 +230,16 @@ class RDDSuite extends FunSuite with LocalSparkContext {
     val sample = ints.filterWith(
       (random: Int, t: Int) => random == 0,
       (index: Int, seed: Int) => {
-          val prng = new Random(index + seed)
-          (_ => prng.nextInt(3))},
+        val prng = new Random(index + seed)
+        (_ => prng.nextInt(3))},
       42).
       collect()
     val checkSample = {
       val prng42 = new Random(42)
       val prng43 = new Random(43)
       Array(1, 2, 3, 4, 5, 6).filter{i =>
-          if (i < 4) 0 == prng42.nextInt(3)
-          else 0 == prng43.nextInt(3)}
+        if (i < 4) 0 == prng42.nextInt(3)
+        else 0 == prng43.nextInt(3)}
     }
     assert(sample.size === checkSample.size)
     for (i <- 0 until sample.size) assert(sample(i) === checkSample(i))
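
For context, a hedged sketch of why the reference sample in this test is deterministic; the two-partition split of the input is an assumption mirroring the suite's makeRDD setup. Elements 1-3 run in partition 0 and 4-6 in partition 1, so filterWith's factoryBuilder receives seeds 42 + 0 and 42 + 1, and the driver can replay both per-partition PRNG streams exactly:

    import scala.util.Random
    val prng42 = new Random(42)  // replays partition 0 (elements 1, 2, 3)
    val prng43 = new Random(43)  // replays partition 1 (elements 4, 5, 6)
    val expected = Array(1, 2, 3, 4, 5, 6).filter { i =>
      // one nextInt(3) draw per element, matching filterWith's per-element factory call
      if (i < 4) prng42.nextInt(3) == 0 else prng43.nextInt(3) == 0
    }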