blob: 5466c9c657fcb03b20f578ce4456aa4c5cc0c1ed (
plain) (
blame)
1
2
3
4
5
6
7
8
9
10
11
12
13
|
package spark.rdd
import spark.{RDD, Split, TaskContext}
/**
 * An RDD that applies a function `f` to every element of its parent RDD.
 *
 * This is a one-to-one transformation: it reuses the parent's splits
 * unchanged and wraps the parent's iterator with a lazy `map`, so no
 * shuffle or materialization happens here.
 */
private[spark]
class MappedRDD[U: ClassManifest, T: ClassManifest](prev: RDD[T], f: T => U)
  extends RDD[U](prev) {

  // Partitioning is identical to the parent's — mapping preserves splits.
  override def getSplits = firstParent[T].splits

  // Transform elements lazily as the parent's iterator is consumed.
  override def compute(split: Split, context: TaskContext) = {
    val parentIterator = firstParent[T].iterator(split, context)
    parentIterator.map(f)
  }
}
|