aboutsummaryrefslogtreecommitdiff
path: root/core
diff options
context:
space:
mode:
authorReynold Xin <reynoldx@gmail.com>2013-08-19 11:02:10 -0700
committerReynold Xin <reynoldx@gmail.com>2013-08-19 11:02:10 -0700
commitacc4aa1f4701235be6eae25a9b940f36a87ea685 (patch)
tree7cbcbf4c8e7792ede206fe271075b2d0eb09ffb3 /core
parent71d705a66eb8782e5cd5c77853fdd99fd8155334 (diff)
downloadspark-acc4aa1f4701235be6eae25a9b940f36a87ea685.tar.gz
spark-acc4aa1f4701235be6eae25a9b940f36a87ea685.tar.bz2
spark-acc4aa1f4701235be6eae25a9b940f36a87ea685.zip
Added a test for sorting using MutablePairs.
Diffstat (limited to 'core')
-rw-r--r--core/src/test/scala/spark/ShuffleSuite.scala20
1 file changed, 18 insertions, 2 deletions
diff --git a/core/src/test/scala/spark/ShuffleSuite.scala b/core/src/test/scala/spark/ShuffleSuite.scala
index c319a57fdd..f1361546a3 100644
--- a/core/src/test/scala/spark/ShuffleSuite.scala
+++ b/core/src/test/scala/spark/ShuffleSuite.scala
@@ -20,9 +20,10 @@ package spark
import org.scalatest.FunSuite
import org.scalatest.matchers.ShouldMatchers
-import spark.rdd.ShuffledRDD
import spark.SparkContext._
import spark.ShuffleSuite.NonJavaSerializableClass
+import spark.rdd.OrderedRDDFunctions
+import spark.rdd.ShuffledRDD
import spark.util.MutablePair
@@ -137,12 +138,27 @@ class ShuffleSuite extends FunSuite with ShouldMatchers with LocalSparkContext {
sc = new SparkContext("local-cluster[2,1,512]", "test")
def p[T1, T2](_1: T1, _2: T2) = MutablePair(_1, _2)
val data = Array(p(1, 1), p(1, 2), p(1, 3), p(2, 1))
- val pairs: RDD[MutablePair[Int, Int]] = sc.parallelize(data)
+ val pairs: RDD[MutablePair[Int, Int]] = sc.parallelize(data, 2)
val results = new ShuffledRDD[Int, Int, MutablePair[Int, Int]](pairs, new HashPartitioner(2))
.collect()
data.foreach { pair => results should contain (pair) }
}
+
+ test("sorting using mutable pairs") {
+ // This is not in SortingSuite because of the local cluster setup.
+ // Use a local cluster with 2 processes to make sure there are both local and remote blocks
+ sc = new SparkContext("local-cluster[2,1,512]", "test")
+ def p[T1, T2](_1: T1, _2: T2) = MutablePair(_1, _2)
+ val data = Array(p(1, 11), p(3, 33), p(100, 100), p(2, 22))
+ val pairs: RDD[MutablePair[Int, Int]] = sc.parallelize(data, 2)
+ val results = new OrderedRDDFunctions[Int, Int, MutablePair[Int, Int]](pairs)
+ .sortByKey().collect()
+ results(0) should be (p(1, 11))
+ results(1) should be (p(2, 22))
+ results(2) should be (p(3, 33))
+ results(3) should be (p(100, 100))
+ }
}
object ShuffleSuite {