blob: 7b0b4525c7e626b803dd84c88a5fce27927a32f7 (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
|
package spark.rdd
import spark.{RDD, Split, TaskContext}
/**
 * An RDD that applies a user-supplied iterator-to-iterator function `f` to
 * every partition of its parent RDD.
 *
 * @param prev the parent RDD supplying the input partitions
 * @param f transformation applied to each partition's iterator
 * @param preservesPartitioning when true, the parent's partitioner is kept
 *        (the caller asserts `f` does not change the keys' partition mapping)
 */
private[spark]
class MapPartitionsRDD[U: ClassManifest, T: ClassManifest](
    prev: RDD[T],
    f: Iterator[T] => Iterator[U],
    preservesPartitioning: Boolean = false)
  extends RDD[U](prev) {

  // Keep the parent's partitioner only if the caller promised that `f`
  // leaves the partitioning intact; otherwise the output has none.
  override val partitioner = if (preservesPartitioning) {
    firstParent[T].partitioner
  } else {
    None
  }

  // One output split per parent split, unchanged.
  override def getSplits: Array[Split] = firstParent[T].splits

  // Lazily transform the parent's records for this split through `f`.
  override def compute(split: Split, context: TaskContext) = {
    val parentIter = firstParent[T].iterator(split, context)
    f(parentIter)
  }
}
|