author     Matei Zaharia <matei@eecs.berkeley.edu>  2012-10-05 16:36:16 -0700
committer  Matei Zaharia <matei@eecs.berkeley.edu>  2012-10-05 16:36:16 -0700
commit     7eae2d1229695dc7cf053750a59f9c8857142373 (patch)
tree       ee756c54c93e5f936e84750d95f6a9b8efb671b1
parent     66d7066d4f2820230fc0bccd639ed7091b7336a4 (diff)
parent     d9bcc2a6e3b86bfcab1c7455521df395ba483fc6 (diff)
Merge branch 'master' into reduce-logging
-rw-r--r--  core/src/main/scala/spark/RDD.scala                      |  2
-rw-r--r--  core/src/test/scala/spark/BoundedMemoryCacheSuite.scala  | 13
-rw-r--r--  core/src/test/scala/spark/SizeEstimatorSuite.scala       | 28
3 files changed, 30 insertions, 13 deletions
diff --git a/core/src/main/scala/spark/RDD.scala b/core/src/main/scala/spark/RDD.scala
index ee0ace1585..371583d496 100644
--- a/core/src/main/scala/spark/RDD.scala
+++ b/core/src/main/scala/spark/RDD.scala
@@ -256,7 +256,7 @@ abstract class RDD[T: ClassManifest](@transient sc: SparkContext) extends Serial
   }
 
   def saveAsObjectFile(path: String) {
-    this.glom
+    this.mapPartitions(iter => iter.grouped(10).map(_.toArray))
       .map(x => (NullWritable.get(), new BytesWritable(Utils.serialize(x))))
       .saveAsSequenceFile(path)
   }
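For context, a minimal standalone sketch of the batching behaviour introduced above, using only the Scala standard library (the element values are hypothetical): glom would have materialized an entire partition as a single Array[T] and serialized it as one record, whereas grouped(10) caps each serialized record at ten elements.

    // Stand-in for one partition's iterator; the values are made up for illustration.
    val iter = (1 to 25).iterator

    // Same transformation as the new saveAsObjectFile body, minus the Hadoop output step:
    // batch the elements into groups of ten and turn each group into an Array.
    val batches = iter.grouped(10).map(_.toArray).toList

    batches.map(_.length)  // List(10, 10, 5): three small records instead of one 25-element record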
diff --git a/core/src/test/scala/spark/BoundedMemoryCacheSuite.scala b/core/src/test/scala/spark/BoundedMemoryCacheSuite.scala
index dff2970566..1ea1075bbe 100644
--- a/core/src/test/scala/spark/BoundedMemoryCacheSuite.scala
+++ b/core/src/test/scala/spark/BoundedMemoryCacheSuite.scala
@@ -2,8 +2,9 @@ package spark
 
 import org.scalatest.FunSuite
 import org.scalatest.PrivateMethodTester
+import org.scalatest.matchers.ShouldMatchers
 
-class BoundedMemoryCacheSuite extends FunSuite with PrivateMethodTester {
+class BoundedMemoryCacheSuite extends FunSuite with PrivateMethodTester with ShouldMatchers {
   test("constructor test") {
     val cache = new BoundedMemoryCache(60)
     expect(60)(cache.getCapacity)
@@ -22,15 +23,21 @@ class BoundedMemoryCacheSuite extends FunSuite with PrivateMethodTester {
         logInfo("Dropping key (%s, %d) of size %d to make space".format(datasetId, partition, entry.size))
       }
     }
+
+    // NOTE: The String class definition changed in JDK 7 to exclude the int fields count and length.
+    // This means that the size of strings will be smaller by 8 bytes in JDK 7 compared to JDK 6.
+    // http://mail.openjdk.java.net/pipermail/core-libs-dev/2012-May/010257.html
+    // Work around this by checking for either value.
+
     //should be OK
-    expect(CachePutSuccess(56))(cache.put("1", 0, "Meh"))
+    cache.put("1", 0, "Meh") should (equal (CachePutSuccess(56)) or equal (CachePutSuccess(48)))
     //we cannot add this to cache (there is not enough space in cache) & we cannot evict the only value from
     //cache because it's from the same dataset
     expect(CachePutFailure())(cache.put("1", 1, "Meh"))
     //should be OK, dataset '1' can be evicted from cache
-    expect(CachePutSuccess(56))(cache.put("2", 0, "Meh"))
+    cache.put("2", 0, "Meh") should (equal (CachePutSuccess(56)) or equal (CachePutSuccess(48)))
     //should fail, cache should obey its capacity
     expect(CachePutFailure())(cache.put("3", 0, "Very_long_and_useless_string"))
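For reference, a hedged standalone sketch of the matcher style adopted above, assuming ScalaTest 1.x ShouldMatchers and its companion-object import; the value below is a made-up stand-in for whatever the cache actually reports.

    import org.scalatest.matchers.ShouldMatchers._

    // Accept either the JDK 6 String layout (56 bytes) or the JDK 7 layout (48 bytes).
    val reportedSize = 56  // hypothetical result
    reportedSize should (equal (56) or equal (48))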
diff --git a/core/src/test/scala/spark/SizeEstimatorSuite.scala b/core/src/test/scala/spark/SizeEstimatorSuite.scala
index a2015644ee..7677ac6db5 100644
--- a/core/src/test/scala/spark/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/spark/SizeEstimatorSuite.scala
@@ -3,6 +3,7 @@ package spark
 import org.scalatest.FunSuite
 import org.scalatest.BeforeAndAfterAll
 import org.scalatest.PrivateMethodTester
+import org.scalatest.matchers.ShouldMatchers
 
 class DummyClass1 {}
@@ -19,7 +20,8 @@ class DummyClass4(val d: DummyClass3) {
   val x: Int = 0
 }
 
-class SizeEstimatorSuite extends FunSuite with BeforeAndAfterAll with PrivateMethodTester {
+class SizeEstimatorSuite extends FunSuite
+  with BeforeAndAfterAll with PrivateMethodTester with ShouldMatchers {
 
   var oldArch: String = _
   var oldOops: String = _
@@ -42,11 +44,15 @@ class SizeEstimatorSuite extends FunSuite with BeforeAndAfterAll with PrivateMet
     expect(48)(SizeEstimator.estimate(new DummyClass4(new DummyClass3)))
   }
 
+  // NOTE: The String class definition changed in JDK 7 to exclude the int fields count and length.
+  // This means that the size of strings will be smaller by 8 bytes in JDK 7 compared to JDK 6.
+  // http://mail.openjdk.java.net/pipermail/core-libs-dev/2012-May/010257.html
+  // Work around this by checking for either value.
   test("strings") {
-    expect(48)(SizeEstimator.estimate(""))
-    expect(56)(SizeEstimator.estimate("a"))
-    expect(56)(SizeEstimator.estimate("ab"))
-    expect(64)(SizeEstimator.estimate("abcdefgh"))
+    SizeEstimator.estimate("") should (equal (48) or equal (40))
+    SizeEstimator.estimate("a") should (equal (56) or equal (48))
+    SizeEstimator.estimate("ab") should (equal (56) or equal (48))
+    SizeEstimator.estimate("abcdefgh") should (equal (64) or equal (56))
   }
 
   test("primitive arrays") {
@@ -106,6 +112,10 @@ class SizeEstimatorSuite extends FunSuite with BeforeAndAfterAll with PrivateMet
     resetOrClear("os.arch", arch)
   }
 
+  // NOTE: The String class definition changed in JDK 7 to exclude the int fields count and length.
+  // This means that the size of strings will be smaller by 8 bytes in JDK 7 compared to JDK 6.
+  // http://mail.openjdk.java.net/pipermail/core-libs-dev/2012-May/010257.html
+  // Work around this by checking for either value.
   test("64-bit arch with no compressed oops") {
     val arch = System.setProperty("os.arch", "amd64")
     val oops = System.setProperty("spark.test.useCompressedOops", "false")
@@ -113,10 +123,10 @@ class SizeEstimatorSuite extends FunSuite with BeforeAndAfterAll with PrivateMet
     val initialize = PrivateMethod[Unit]('initialize)
     SizeEstimator invokePrivate initialize()
 
-    expect(64)(SizeEstimator.estimate(""))
-    expect(72)(SizeEstimator.estimate("a"))
-    expect(72)(SizeEstimator.estimate("ab"))
-    expect(80)(SizeEstimator.estimate("abcdefgh"))
+    SizeEstimator.estimate("") should (equal (64) or equal (56))
+    SizeEstimator.estimate("a") should (equal (72) or equal (64))
+    SizeEstimator.estimate("ab") should (equal (72) or equal (64))
+    SizeEstimator.estimate("abcdefgh") should (equal (80) or equal (72))
 
     resetOrClear("os.arch", arch)
     resetOrClear("spark.test.useCompressedOops", oops)