path: root/core/src/main/scala/spark/rdd/MapPartitionsWithIndexRDD.scala
package spark.rdd

import spark.{RDD, Partition, TaskContext}


/**
 * A variant of the MapPartitionsRDD that passes the partition index into the
 * closure. This can be used to generate or collect partition-specific
 * information, such as the number of tuples in a partition.
 */
private[spark]
class MapPartitionsWithIndexRDD[U: ClassManifest, T: ClassManifest](
    prev: RDD[T],
    f: (Int, Iterator[T]) => Iterator[U],
    preservesPartitioning: Boolean
  ) extends RDD[U](prev) {

  // This RDD has exactly the same partitions as its parent.
  override def getPartitions: Array[Partition] = firstParent[T].partitions

  // Keep the parent's partitioner only if the closure is declared to preserve it.
  override val partitioner = if (preservesPartitioning) prev.partitioner else None

  // Apply the user function to the partition's index and the parent's iterator.
  override def compute(split: Partition, context: TaskContext) =
    f(split.index, firstParent[T].iterator(split, context))
}
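
// Usage sketch (not part of the original file): a MapPartitionsWithIndexRDD is
// normally created through RDD.mapPartitionsWithIndex rather than instantiated
// directly. The object below is a hypothetical, minimal example; the "local"
// master and the app name passed to SparkContext are illustrative assumptions.
private[spark] object MapPartitionsWithIndexExample {
  def main(args: Array[String]) {
    val sc = new spark.SparkContext("local", "MapPartitionsWithIndexExample")
    val rdd = sc.parallelize(1 to 100, 4)
    // Pair each partition index with the number of elements in that partition.
    val counts = rdd.mapPartitionsWithIndex((index, iter) => Iterator((index, iter.size)))
    counts.collect().foreach(println)  // e.g. (0,25), (1,25), (2,25), (3,25)
    sc.stop()
  }
}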