aboutsummaryrefslogtreecommitdiff
path: root/core/src/test/scala
diff options
context:
space:
mode:
authorMatei Zaharia <matei@eecs.berkeley.edu>2013-11-25 18:50:18 -0800
committerMatei Zaharia <matei@eecs.berkeley.edu>2013-11-25 18:50:18 -0800
commit14bb465bb3d65f5b1034ada85cfcad7460034073 (patch)
treeb0b7fe5a5943b234176bab66d4d9518daf887b4f /core/src/test/scala
parenteb4296c8f7561aaf8782479dd5cd7c9320b7fa6b (diff)
parente9ff13ec72718ada705b85cc10da1b09bcc86dcc (diff)
downloadspark-14bb465bb3d65f5b1034ada85cfcad7460034073.tar.gz
spark-14bb465bb3d65f5b1034ada85cfcad7460034073.tar.bz2
spark-14bb465bb3d65f5b1034ada85cfcad7460034073.zip
Merge pull request #201 from rxin/mappartitions
Use the proper partition index in mapPartitionsWithIndex. mapPartitionsWithIndex uses TaskContext.partitionId as the partition index. TaskContext.partitionId used to be identical to the partition index in an RDD. However, pull request #186 introduced a scenario (with partition pruning) in which the two can be different. This pull request uses the right partition index in all mapPartitionsWithIndex related calls. Also removed the extra MapPartitionsWithContextRDD and put all the mapPartitions related functionality in MapPartitionsRDD.
Diffstat (limited to 'core/src/test/scala')
-rw-r--r--core/src/test/scala/org/apache/spark/CheckpointSuite.scala2
1 file changed, 0 insertions, 2 deletions
diff --git a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
index f26c44d3e7..d2226aa5a5 100644
--- a/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/org/apache/spark/CheckpointSuite.scala
@@ -62,8 +62,6 @@ class CheckpointSuite extends FunSuite with LocalSparkContext with Logging {
testCheckpointing(_.sample(false, 0.5, 0))
testCheckpointing(_.glom())
testCheckpointing(_.mapPartitions(_.map(_.toString)))
- testCheckpointing(r => new MapPartitionsWithContextRDD(r,
- (context: TaskContext, iter: Iterator[Int]) => iter.map(_.toString), false ))
testCheckpointing(_.map(x => (x % 2, 1)).reduceByKey(_ + _).mapValues(_.toString))
testCheckpointing(_.map(x => (x % 2, 1)).reduceByKey(_ + _).flatMapValues(x => 1 to x))
testCheckpointing(_.pipe(Seq("cat")))