path: root/core/src/test
author      Matei Zaharia <matei@eecs.berkeley.edu>  2012-11-27 22:27:47 -0800
committer   Matei Zaharia <matei@eecs.berkeley.edu>  2012-11-27 22:27:47 -0800
commit      27e43abd192440de5b10a5cc022fd5705362b276 (patch)
tree        fe3ac7899babc6f5c26f0dc90607820c9804e511 /core/src/test
parent      59c0a9ad164ef8a6382737aa197f41e407e1c89d (diff)
Added a zip() operation for RDDs with the same shape (number of
partitions and number of elements in each partition)
Diffstat (limited to 'core/src/test')
-rw-r--r--  core/src/test/scala/spark/RDDSuite.scala  12
1 file changed, 12 insertions, 0 deletions
diff --git a/core/src/test/scala/spark/RDDSuite.scala b/core/src/test/scala/spark/RDDSuite.scala
index 37a0ff0947..b3c820ed94 100644
--- a/core/src/test/scala/spark/RDDSuite.scala
+++ b/core/src/test/scala/spark/RDDSuite.scala
@@ -114,4 +114,16 @@ class RDDSuite extends FunSuite with BeforeAndAfter {
assert(coalesced4.glom().collect().map(_.toList).toList ===
(1 to 10).map(x => List(x)).toList)
}
+
+ test("zipped RDDs") {
+ sc = new SparkContext("local", "test")
+ val nums = sc.makeRDD(Array(1, 2, 3, 4), 2)
+ val zipped = nums.zip(nums.map(_ + 1.0))
+ assert(zipped.glom().map(_.toList).collect().toList ===
+ List(List((1, 2.0), (2, 3.0)), List((3, 4.0), (4, 5.0))))
+
+ intercept[IllegalArgumentException] {
+ nums.zip(sc.parallelize(1 to 4, 1)).collect()
+ }
+ }
}
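
For context, a minimal usage sketch of the zip() operation introduced by this commit, mirroring the test above. The object name ZipExample and the "local" master URL are illustrative only; the behavior shown (element-wise pairing, and an IllegalArgumentException for mismatched shapes) is taken from the test itself.

    // Sketch: zipping two RDDs with the same number of partitions and the
    // same number of elements per partition.
    import spark.SparkContext

    object ZipExample {
      def main(args: Array[String]) {
        val sc = new SparkContext("local", "zip-example")

        val nums    = sc.makeRDD(Array(1, 2, 3, 4), 2)   // 2 partitions, 2 elements each
        val doubles = nums.map(_ + 1.0)                  // same shape as nums
        val pairs   = nums.zip(doubles)                  // RDD[(Int, Double)]

        println(pairs.collect().toList)
        // List((1,2.0), (2,3.0), (3,4.0), (4,5.0))

        // Zipping RDDs whose partition counts or partition sizes differ
        // fails with an IllegalArgumentException, as the test asserts.
        sc.stop()
      }
    }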