From 7dd9fc67a63985493ad0482d307edd56f3af0b9d Mon Sep 17 00:00:00 2001
From: Kan Zhang
Date: Sat, 14 Jun 2014 14:31:28 -0700
Subject: [SPARK-1837] NumericRange should be partitioned in the same way as
 other...

... sequences

Author: Kan Zhang

Closes #776 from kanzhang/SPARK-1837 and squashes the following commits:

e48f018 [Kan Zhang] [SPARK-1837] code refactoring
67c33b5 [Kan Zhang] minor change
403f9b1 [Kan Zhang] [SPARK-1837] NumericRange should be partitioned in the same way as other sequences
---
 .../spark/rdd/ParallelCollectionSplitSuite.scala | 18 ++++++++++++++++++
 1 file changed, 18 insertions(+)

diff --git a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
index 4df36558b6..1b112f1a41 100644
--- a/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
+++ b/core/src/test/scala/org/apache/spark/rdd/ParallelCollectionSplitSuite.scala
@@ -111,6 +111,24 @@ class ParallelCollectionSplitSuite extends FunSuite with Checkers {
     assert(slices.forall(_.isInstanceOf[Range]))
   }
 
+  test("identical slice sizes between Range and NumericRange") {
+    val r = ParallelCollectionRDD.slice(1 to 7, 4)
+    val nr = ParallelCollectionRDD.slice(1L to 7L, 4)
+    assert(r.size === 4)
+    for (i <- 0 until r.size) {
+      assert(r(i).size === nr(i).size)
+    }
+  }
+
+  test("identical slice sizes between List and NumericRange") {
+    val r = ParallelCollectionRDD.slice(List(1, 2), 4)
+    val nr = ParallelCollectionRDD.slice(1L to 2L, 4)
+    assert(r.size === 4)
+    for (i <- 0 until r.size) {
+      assert(r(i).size === nr(i).size)
+    }
+  }
+
   test("large ranges don't overflow") {
     val N = 100 * 1000 * 1000
     val data = 0 until N
-- 
cgit v1.2.3
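
Note: the tests added above assert that slicing a NumericRange (e.g. 1L to 7L) produces the same per-slice sizes as slicing a Range or a List of the same length. Below is a minimal, standalone sketch of the position-based splitting scheme those tests exercise; it is not Spark's code (the real logic lives in ParallelCollectionRDD.slice), and the names SliceSketch, positions, and slice are illustrative assumptions.

// Standalone Scala sketch (assumed simplification, not Spark internals):
// split a sequence of `length` items into `numSlices` contiguous slices
// whose boundaries depend only on length and numSlices, so any Seq of the
// same length is cut identically.
object SliceSketch {
  // Boundary positions (start, end) for each slice, near-equal sizes.
  def positions(length: Long, numSlices: Int): Iterator[(Long, Long)] =
    (0 until numSlices).iterator.map { i =>
      val start = (i * length) / numSlices
      val end = ((i + 1) * length) / numSlices
      (start, end)
    }

  // Slice any Seq using those boundaries.
  def slice[T](seq: Seq[T], numSlices: Int): Seq[Seq[T]] =
    positions(seq.length, numSlices).map { case (start, end) =>
      seq.slice(start.toInt, end.toInt)
    }.toSeq

  def main(args: Array[String]): Unit = {
    val intSlices  = slice(1 to 7, 4)    // Range
    val longSlices = slice(1L to 7L, 4)  // NumericRange[Long]
    // Same boundaries => same per-slice sizes (1, 2, 2, 2 here).
    assert(intSlices.map(_.size) == longSlices.map(_.size))
    println(intSlices.map(_.size).mkString(","))
  }
}

Under this scheme the slice sizes are a function of the collection's length only, which is exactly the property the new "identical slice sizes" tests check for NumericRange versus Range and List.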