about summary refs log tree commit diff
path: root/core/src/test
diff options
context:
space:
mode:
Diffstat (limited to 'core/src/test')
-rw-r--r--core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala47
-rw-r--r--core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala10
2 files changed, 49 insertions, 8 deletions
diff --git a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
index 28915bd533..133a76f28e 100644
--- a/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/SizeEstimatorSuite.scala
@@ -36,6 +36,15 @@ class DummyClass4(val d: DummyClass3) {
val x: Int = 0
}
+// dummy class to show class field blocks alignment.
+class DummyClass5 extends DummyClass1 {
+ val x: Boolean = true
+}
+
+class DummyClass6 extends DummyClass5 {
+ val y: Boolean = true
+}
+
object DummyString {
def apply(str: String) : DummyString = new DummyString(str.toArray)
}
@@ -50,6 +59,7 @@ class SizeEstimatorSuite
override def beforeEach() {
// Set the arch to 64-bit and compressedOops to true to get a deterministic test-case
+ super.beforeEach()
System.setProperty("os.arch", "amd64")
System.setProperty("spark.test.useCompressedOops", "true")
}
@@ -62,6 +72,22 @@ class SizeEstimatorSuite
assertResult(48)(SizeEstimator.estimate(new DummyClass4(new DummyClass3)))
}
+ test("primitive wrapper objects") {
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Boolean(true)))
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Byte("1")))
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Character('1')))
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Short("1")))
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Integer(1)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Long(1)))
+ assertResult(16)(SizeEstimator.estimate(new java.lang.Float(1.0)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Double(1.0d)))
+ }
+
+ test("class field blocks rounding") {
+ assertResult(16)(SizeEstimator.estimate(new DummyClass5))
+ assertResult(24)(SizeEstimator.estimate(new DummyClass6))
+ }
+
// NOTE: The String class definition varies across JDK versions (1.6 vs. 1.7) and vendors
// (Sun vs IBM). Use a DummyString class to make tests deterministic.
test("strings") {
@@ -102,18 +128,18 @@ class SizeEstimatorSuite
val arr = new Array[Char](100000)
assertResult(200016)(SizeEstimator.estimate(arr))
assertResult(480032)(SizeEstimator.estimate(Array.fill(10000)(new DummyString(arr))))
-
+
val buf = new ArrayBuffer[DummyString]()
for (i <- 0 until 5000) {
buf.append(new DummyString(new Array[Char](10)))
}
assertResult(340016)(SizeEstimator.estimate(buf.toArray))
-
+
for (i <- 0 until 5000) {
buf.append(new DummyString(arr))
}
assertResult(683912)(SizeEstimator.estimate(buf.toArray))
-
+
// If an array contains the *same* element many times, we should only count it once.
val d1 = new DummyClass1
// 10 pointers plus 8-byte object
@@ -155,5 +181,20 @@ class SizeEstimatorSuite
assertResult(64)(SizeEstimator.estimate(DummyString("a")))
assertResult(64)(SizeEstimator.estimate(DummyString("ab")))
assertResult(72)(SizeEstimator.estimate(DummyString("abcdefgh")))
+
+ // primitive wrapper classes
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Boolean(true)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Byte("1")))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Character('1')))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Short("1")))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Integer(1)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Long(1)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Float(1.0)))
+ assertResult(24)(SizeEstimator.estimate(new java.lang.Double(1.0d)))
+ }
+
+ test("class field blocks rounding on 64-bit VM without useCompressedOops") {
+ assertResult(24)(SizeEstimator.estimate(new DummyClass5))
+ assertResult(32)(SizeEstimator.estimate(new DummyClass6))
}
}
diff --git a/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala b/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
index 20fd22b78e..7a98723bc6 100644
--- a/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
+++ b/core/src/test/scala/org/apache/spark/util/collection/ExternalSorterSuite.scala
@@ -377,7 +377,7 @@ class ExternalSorterSuite extends FunSuite with LocalSparkContext with PrivateMe
val sorter = new ExternalSorter[Int, Int, Int](
None, Some(new HashPartitioner(3)), Some(ord), None)
assertDidNotBypassMergeSort(sorter)
- sorter.insertAll((0 until 100000).iterator.map(i => (i, i)))
+ sorter.insertAll((0 until 120000).iterator.map(i => (i, i)))
assert(diskBlockManager.getAllFiles().length > 0)
sorter.stop()
assert(diskBlockManager.getAllBlocks().length === 0)
@@ -385,9 +385,9 @@ class ExternalSorterSuite extends FunSuite with LocalSparkContext with PrivateMe
val sorter2 = new ExternalSorter[Int, Int, Int](
None, Some(new HashPartitioner(3)), Some(ord), None)
assertDidNotBypassMergeSort(sorter2)
- sorter2.insertAll((0 until 100000).iterator.map(i => (i, i)))
+ sorter2.insertAll((0 until 120000).iterator.map(i => (i, i)))
assert(diskBlockManager.getAllFiles().length > 0)
- assert(sorter2.iterator.toSet === (0 until 100000).map(i => (i, i)).toSet)
+ assert(sorter2.iterator.toSet === (0 until 120000).map(i => (i, i)).toSet)
sorter2.stop()
assert(diskBlockManager.getAllBlocks().length === 0)
}
@@ -428,8 +428,8 @@ class ExternalSorterSuite extends FunSuite with LocalSparkContext with PrivateMe
None, Some(new HashPartitioner(3)), Some(ord), None)
assertDidNotBypassMergeSort(sorter)
intercept[SparkException] {
- sorter.insertAll((0 until 100000).iterator.map(i => {
- if (i == 99990) {
+ sorter.insertAll((0 until 120000).iterator.map(i => {
+ if (i == 119990) {
throw new SparkException("Intentional failure")
}
(i, i)