aboutsummaryrefslogtreecommitdiff
path: root/core/src/test
diff options
context:
space:
mode:
authorTathagata Das <tathagata.das1565@gmail.com>2012-12-11 15:36:12 -0800
committerTathagata Das <tathagata.das1565@gmail.com>2012-12-11 15:36:12 -0800
commit8e74fac215e8b9cda7e35111c5116e3669c6eb97 (patch)
treecb460c247109d8028aadc1f7d112d35f4f204ffc /core/src/test
parentfa28f25619d6712e5f920f498ec03085ea208b4d (diff)
downloadspark-8e74fac215e8b9cda7e35111c5116e3669c6eb97.tar.gz
spark-8e74fac215e8b9cda7e35111c5116e3669c6eb97.tar.bz2
spark-8e74fac215e8b9cda7e35111c5116e3669c6eb97.zip
Made checkpoint data in RDDs optional to further reduce serialized size.
Diffstat (limited to 'core/src/test')
-rw-r--r--core/src/test/scala/spark/CheckpointSuite.scala12
1 file changed, 6 insertions, 6 deletions
diff --git a/core/src/test/scala/spark/CheckpointSuite.scala b/core/src/test/scala/spark/CheckpointSuite.scala
index 909c55c91c..0bffedb8db 100644
--- a/core/src/test/scala/spark/CheckpointSuite.scala
+++ b/core/src/test/scala/spark/CheckpointSuite.scala
@@ -57,7 +57,7 @@ class CheckpointSuite extends FunSuite with BeforeAndAfter with Logging {
assert(sc.objectFile[Int](parCollection.getCheckpointFile.get).collect() === result)
assert(parCollection.dependencies != Nil)
assert(parCollection.splits.length === numSplits)
- assert(parCollection.splits.toList === parCollection.checkpointData.cpRDDSplits.toList)
+ assert(parCollection.splits.toList === parCollection.checkpointData.get.cpRDDSplits.toList)
assert(parCollection.collect() === result)
}
@@ -72,7 +72,7 @@ class CheckpointSuite extends FunSuite with BeforeAndAfter with Logging {
assert(sc.objectFile[String](blockRDD.getCheckpointFile.get).collect() === result)
assert(blockRDD.dependencies != Nil)
assert(blockRDD.splits.length === numSplits)
- assert(blockRDD.splits.toList === blockRDD.checkpointData.cpRDDSplits.toList)
+ assert(blockRDD.splits.toList === blockRDD.checkpointData.get.cpRDDSplits.toList)
assert(blockRDD.collect() === result)
}
@@ -84,7 +84,7 @@ class CheckpointSuite extends FunSuite with BeforeAndAfter with Logging {
}
test("UnionRDD") {
- def otherRDD = sc.makeRDD(1 to 10, 4)
+ def otherRDD = sc.makeRDD(1 to 10, 1)
// Test whether the size of UnionRDDSplits is reduced after the parent RDD is checkpointed.
// Current implementation of UnionRDD has transient reference to parent RDDs,
@@ -191,7 +191,7 @@ class CheckpointSuite extends FunSuite with BeforeAndAfter with Logging {
assert(operatedRDD.dependencies.head.rdd != parentRDD)
// Test whether the splits have been changed to the new Hadoop splits
- assert(operatedRDD.splits.toList === operatedRDD.checkpointData.cpRDDSplits.toList)
+ assert(operatedRDD.splits.toList === operatedRDD.checkpointData.get.cpRDDSplits.toList)
// Test whether the number of splits is same as before
assert(operatedRDD.splits.length === numSplits)
@@ -289,8 +289,8 @@ class CheckpointSuite extends FunSuite with BeforeAndAfter with Logging {
*/
def generateLongLineageRDD(): RDD[Int] = {
var rdd = sc.makeRDD(1 to 100, 4)
- for (i <- 1 to 20) {
- rdd = rdd.map(x => x)
+ for (i <- 1 to 50) {
+ rdd = rdd.map(x => x + 1)
}
rdd
}