blob: af07311b6d0385673150866bb9288e7a270c1da9 (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
|
package spark.rdd
import spark.{RDD, Partition, TaskContext}
/**
 * An RDD that applies the function `f` to every element of its parent RDD.
 *
 * This is a narrow (one-to-one) transformation: it reuses the parent's
 * partitioning unchanged and maps each parent partition's iterator lazily,
 * so no data is materialized until the resulting iterator is consumed.
 *
 * @param prev the parent RDD whose elements are transformed
 * @param f    the transformation applied to each element
 */
private[spark]
class MappedRDD[U: ClassManifest, T: ClassManifest](prev: RDD[T], f: T => U)
  extends RDD[U](prev) {

  // Partitioning is inherited verbatim from the parent RDD.
  override def getPartitions: Array[Partition] = firstParent[T].partitions

  override def compute(split: Partition, context: TaskContext): Iterator[U] = {
    // Lazily transform the parent's iterator for this partition; elements
    // are mapped one at a time as the downstream consumer pulls them.
    val parentIterator = firstParent[T].iterator(split, context)
    parentIterator.map(f)
  }
}
|