about | summary | refs | log | tree | commit | diff
path: root/core
diff options
context:
space:
mode:
author	Shivaram Venkataraman <shivaram@eecs.berkeley.edu>	2012-08-12 17:16:27 -0700
committer	Shivaram Venkataraman <shivaram@eecs.berkeley.edu>	2012-08-13 13:31:45 -0700
commit	1ea269110c0a3814120dc27d194e03852d26fb50 (patch)
tree	f22849996c635912cfacd7bd1171241bebaf8787 /core
parent	44661df9ccab0bfcbf7059840f1bfd2fbee8b052 (diff)
download	spark-1ea269110c0a3814120dc27d194e03852d26fb50.tar.gz
	spark-1ea269110c0a3814120dc27d194e03852d26fb50.tar.bz2
	spark-1ea269110c0a3814120dc27d194e03852d26fb50.zip
Move object size and pointer size initialization into a function to enable unit-testing
Diffstat (limited to 'core')
-rw-r--r--core/src/main/scala/spark/SizeEstimator.scala52
1 file changed, 31 insertions, 21 deletions
diff --git a/core/src/main/scala/spark/SizeEstimator.scala b/core/src/main/scala/spark/SizeEstimator.scala
index 45f9a1cd40..e5ad8b52dc 100644
--- a/core/src/main/scala/spark/SizeEstimator.scala
+++ b/core/src/main/scala/spark/SizeEstimator.scala
@@ -34,33 +34,43 @@ object SizeEstimator extends Logging {
private val FLOAT_SIZE = 4
private val DOUBLE_SIZE = 8
+ // Alignment boundary for objects
+ // TODO: Is this arch dependent ?
+ private val ALIGN_SIZE = 8
+
+ // A cache of ClassInfo objects for each class
+ private val classInfos = new ConcurrentHashMap[Class[_], ClassInfo]
+
// Object and pointer sizes are arch dependent
- val is64bit = System.getProperty("os.arch").contains("64")
+ private var is64bit = false
// Size of an object reference
- val isCompressedOops = getIsCompressedOops
-
// Based on https://wikis.oracle.com/display/HotSpotInternals/CompressedOops
- // section, "Which oops are compressed"
+ private var isCompressedOops = false
+ private var pointerSize = 4
// Minimum size of a java.lang.Object
- val OBJECT_SIZE = if (!is64bit) 8 else {
- if(!isCompressedOops) {
- 16
- } else {
- 12
- }
- }
+ private var objectSize = 8
- val POINTER_SIZE = if (is64bit && !isCompressedOops) 8 else 4
+ initialize()
- // Alignment boundary for objects
- // TODO: Is this arch dependent ?
- private val ALIGN_SIZE = 8
+ // Sets object size, pointer size based on architecture and CompressedOops settings
+ // from the JVM.
+ private def initialize() {
+ is64bit = System.getProperty("os.arch").contains("64")
+ isCompressedOops = getIsCompressedOops
- // A cache of ClassInfo objects for each class
- private val classInfos = new ConcurrentHashMap[Class[_], ClassInfo]
- classInfos.put(classOf[Object], new ClassInfo(OBJECT_SIZE, Nil))
+ objectSize = if (!is64bit) 8 else {
+ if(!isCompressedOops) {
+ 16
+ } else {
+ 12
+ }
+ }
+ pointerSize = if (is64bit && !isCompressedOops) 8 else 4
+ classInfos.clear()
+ classInfos.put(classOf[Object], new ClassInfo(objectSize, Nil))
+ }
private def getIsCompressedOops : Boolean = {
if (System.getProperty("spark.test.useCompressedOops") != null) {
@@ -154,13 +164,13 @@ object SizeEstimator extends Logging {
val elementClass = cls.getComponentType
// Arrays have object header and length field which is an integer
- var arrSize: Long = alignSize(OBJECT_SIZE + INT_SIZE)
+ var arrSize: Long = alignSize(objectSize + INT_SIZE)
if (elementClass.isPrimitive) {
arrSize += alignSize(length * primitiveSize(elementClass))
state.size += arrSize
} else {
- arrSize += alignSize(length * POINTER_SIZE)
+ arrSize += alignSize(length * pointerSize)
state.size += arrSize
if (length <= ARRAY_SIZE_FOR_SAMPLING) {
@@ -228,7 +238,7 @@ object SizeEstimator extends Logging {
shellSize += primitiveSize(fieldClass)
} else {
field.setAccessible(true) // Enable future get()'s on this field
- shellSize += POINTER_SIZE
+ shellSize += pointerSize
pointerFields = field :: pointerFields
}
}