author     Patrick Wendell <pwendell@gmail.com>  2014-03-06 17:57:31 -0800
committer  Patrick Wendell <pwendell@gmail.com>  2014-03-06 17:57:31 -0800
commit     33baf14b04bcb5cb8dc39ae0773b9e0ef79ef9cf (patch)
tree       62cc25e816865d0b339d0023afab081cf8e86068 /core
parent     9ae919c02f7b7d069215e8dc6cafef0ec79c9d5f (diff)
Small clean-up to flatmap tests
Diffstat (limited to 'core')
-rw-r--r--  core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala  11
1 file changed, 3 insertions(+), 8 deletions(-)
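For context, the cleaned-up tests follow the pattern sketched below: a plain "local" master replaces the heavier "local-cluster[1,1,512]" setup, and the debug println calls around the persisted counts are dropped so the asserts alone verify the behavior. This is a minimal standalone sketch of that pattern, not the suite itself (the real tests run inside FunSuite with LocalSparkContext managing the context lifecycle):

    import org.apache.spark.{SparkConf, SparkContext}
    import org.apache.spark.storage.StorageLevel

    // A plain "local" master is enough to exercise the flatMap iterator
    // paths and starts much faster than "local-cluster[1,1,512]".
    val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
    val sc = new SparkContext(sconf)

    val expand_size = 100
    val data = sc.parallelize((1 to 5).toSeq).flatMap(x => Stream.range(0, expand_size))
    val persisted = data.persist(StorageLevel.DISK_ONLY)

    // The asserts already force evaluation and check the counts,
    // so the old println(persisted.count()) added nothing.
    assert(persisted.count() == 500)
    assert(persisted.filter(_ == 1).count() == 5)
    sc.stop()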
diff --git a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
index b843b4c629..bcf138b5ee 100644
--- a/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/storage/FlatmapIteratorSuite.scala
@@ -33,34 +33,29 @@ class FlatmapIteratorSuite extends FunSuite with LocalSparkContext {
* info from the serializer, and allow old objects to be GC'd
*/
test("Flatmap Iterator to Disk") {
- val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
- .setAppName("iterator_to_disk_test")
+ val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
sc = new SparkContext(sconf)
val expand_size = 100
val data = sc.parallelize((1 to 5).toSeq).
flatMap( x => Stream.range(0, expand_size))
var persisted = data.persist(StorageLevel.DISK_ONLY)
- println(persisted.count())
assert(persisted.count()===500)
assert(persisted.filter(_==1).count()===5)
}
test("Flatmap Iterator to Memory") {
- val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
- .setAppName("iterator_to_disk_test")
+ val sconf = new SparkConf().setMaster("local").setAppName("iterator_to_disk_test")
sc = new SparkContext(sconf)
val expand_size = 100
val data = sc.parallelize((1 to 5).toSeq).
flatMap(x => Stream.range(0, expand_size))
var persisted = data.persist(StorageLevel.MEMORY_ONLY)
- println(persisted.count())
assert(persisted.count()===500)
assert(persisted.filter(_==1).count()===5)
}
test("Serializer Reset") {
- val sconf = new SparkConf().setMaster("local-cluster[1,1,512]")
- .setAppName("serializer_reset_test")
+ val sconf = new SparkConf().setMaster("local").setAppName("serializer_reset_test")
.set("spark.serializer.objectStreamReset", "10")
sc = new SparkContext(sconf)
val expand_size = 500