author     Ankur Dave <ankurdave@gmail.com>  2014-01-10 18:00:54 -0800
committer  Ankur Dave <ankurdave@gmail.com>  2014-01-10 18:00:54 -0800
commit     41d6586e8e0df94fee66a386c967b56c535e3c28 (patch)
tree       21cf315a3a3e64b6bd3586c3b9230a173686645f /core/src
parent     85a6645d318e728454e81096ca8140b5f640e782 (diff)
Revert changes to Spark's (PrimitiveKey)OpenHashMap; copy PKOHM to graphx
Diffstat (limited to 'core/src')
-rw-r--r--  core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala             | 51
-rw-r--r--  core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala | 57
2 files changed, 33 insertions(+), 75 deletions(-)
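
For callers that used the removed setMerge(k, v, mergeF), the changeValue method that remains on both maps covers the same merge-on-insert pattern. A minimal sketch, assuming it compiles inside Spark's own source tree (both classes are private[spark]); the counts map and the words below are illustrative, not part of this commit:

    import org.apache.spark.util.collection.OpenHashMap

    // Merge-on-insert without setMerge: changeValue takes a default for absent
    // keys and a merge function for existing ones.
    val counts = new OpenHashMap[String, Int]()
    for (word <- Seq("a", "b", "a")) {
      // Roughly what counts.setMerge(word, 1, _ + _) did before this revert.
      counts.changeValue(word, 1, _ + 1)
    }
    assert(counts("a") == 2)
    assert(counts("b") == 1)
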
diff --git a/core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala b/core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala
index a7a6635dec..c26f23d500 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/OpenHashMap.scala
@@ -28,20 +28,18 @@ import scala.reflect.ClassTag
*/
private[spark]
class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: ClassTag](
- val keySet: OpenHashSet[K], var _values: Array[V])
+ initialCapacity: Int)
extends Iterable[(K, V)]
with Serializable {
- /**
- * Allocate an OpenHashMap with a fixed initial capacity
- */
- def this(initialCapacity: Int = 64) =
- this(new OpenHashSet[K](initialCapacity), new Array[V](initialCapacity))
+ def this() = this(64)
- /**
- * Allocate an OpenHashMap with a fixed initial capacity
- */
- def this(keySet: OpenHashSet[K]) = this(keySet, new Array[V](keySet.capacity))
+ protected var _keySet = new OpenHashSet[K](initialCapacity)
+
+ // Init in constructor (instead of in declaration) to work around a Scala compiler specialization
+ // bug that would generate two arrays (one for Object and one for specialized T).
+ private var _values: Array[V] = _
+ _values = new Array[V](_keySet.capacity)
@transient private var _oldValues: Array[V] = null
@@ -49,14 +47,14 @@ class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: Class
private var haveNullValue = false
private var nullValue: V = null.asInstanceOf[V]
- override def size: Int = if (haveNullValue) keySet.size + 1 else keySet.size
+ override def size: Int = if (haveNullValue) _keySet.size + 1 else _keySet.size
/** Get the value for a given key */
def apply(k: K): V = {
if (k == null) {
nullValue
} else {
- val pos = keySet.getPos(k)
+ val pos = _keySet.getPos(k)
if (pos < 0) {
null.asInstanceOf[V]
} else {
@@ -71,26 +69,9 @@ class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: Class
haveNullValue = true
nullValue = v
} else {
- val pos = keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
+ val pos = _keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
_values(pos) = v
- keySet.rehashIfNeeded(k, grow, move)
- _oldValues = null
- }
- }
-
- /** Set the value for a key */
- def setMerge(k: K, v: V, mergeF: (V,V) => V) {
- if (k == null) {
- if(haveNullValue) {
- nullValue = mergeF(nullValue, v)
- } else {
- haveNullValue = true
- nullValue = v
- }
- } else {
- val pos = keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
- _values(pos) = mergeF(_values(pos), v)
- keySet.rehashIfNeeded(k, grow, move)
+ _keySet.rehashIfNeeded(k, grow, move)
_oldValues = null
}
}
@@ -111,11 +92,11 @@ class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: Class
}
nullValue
} else {
- val pos = keySet.addWithoutResize(k)
+ val pos = _keySet.addWithoutResize(k)
if ((pos & OpenHashSet.NONEXISTENCE_MASK) != 0) {
val newValue = defaultValue
_values(pos & OpenHashSet.POSITION_MASK) = newValue
- keySet.rehashIfNeeded(k, grow, move)
+ _keySet.rehashIfNeeded(k, grow, move)
newValue
} else {
_values(pos) = mergeValue(_values(pos))
@@ -137,9 +118,9 @@ class OpenHashMap[K >: Null : ClassTag, @specialized(Long, Int, Double) V: Class
}
pos += 1
}
- pos = keySet.nextPos(pos)
+ pos = _keySet.nextPos(pos)
if (pos >= 0) {
- val ret = (keySet.getValue(pos), _values(pos))
+ val ret = (_keySet.getValue(pos), _values(pos))
pos += 1
ret
} else {
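
The comment introduced above points at a general pitfall: for @specialized classes, initializing an array field at its declaration site can make the Scala compiler allocate two backing arrays (one Object[] plus one specialized array). A minimal sketch of the workaround pattern; the Box class is hypothetical and only illustrates the field-init-in-constructor-body trick used here:

    import scala.reflect.ClassTag

    class Box[@specialized(Long, Int, Double) V: ClassTag](capacity: Int) {
      // Declare the field uninitialized ...
      private var values: Array[V] = _
      // ... and allocate it in the constructor body, so only the specialized
      // array is created for e.g. Box[Int] or Box[Long].
      values = new Array[V](capacity)

      def apply(i: Int): V = values(i)
      def update(i: Int, v: V): Unit = values(i) = v
    }
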
diff --git a/core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala
index 1dc9f744e1..2e1ef06cbc 100644
--- a/core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala
+++ b/core/src/main/scala/org/apache/spark/util/collection/PrimitiveKeyOpenHashMap.scala
@@ -29,68 +29,45 @@ import scala.reflect._
private[spark]
class PrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
@specialized(Long, Int, Double) V: ClassTag](
- val keySet: OpenHashSet[K], var _values: Array[V])
+ initialCapacity: Int)
extends Iterable[(K, V)]
with Serializable {
- /**
- * Allocate an OpenHashMap with a fixed initial capacity
- */
- def this(initialCapacity: Int) =
- this(new OpenHashSet[K](initialCapacity), new Array[V](initialCapacity))
-
- /**
- * Allocate an OpenHashMap with a default initial capacity, providing a true
- * no-argument constructor.
- */
def this() = this(64)
- /**
- * Allocate an OpenHashMap with a fixed initial capacity
- */
- def this(keySet: OpenHashSet[K]) = this(keySet, new Array[V](keySet.capacity))
-
require(classTag[K] == classTag[Long] || classTag[K] == classTag[Int])
+ // Init in constructor (instead of in declaration) to work around a Scala compiler specialization
+ // bug that would generate two arrays (one for Object and one for specialized T).
+ protected var _keySet: OpenHashSet[K] = _
+ private var _values: Array[V] = _
+ _keySet = new OpenHashSet[K](initialCapacity)
+ _values = new Array[V](_keySet.capacity)
+
private var _oldValues: Array[V] = null
- override def size = keySet.size
+ override def size = _keySet.size
/** Get the value for a given key */
def apply(k: K): V = {
- val pos = keySet.getPos(k)
+ val pos = _keySet.getPos(k)
_values(pos)
}
/** Get the value for a given key, or returns elseValue if it doesn't exist. */
def getOrElse(k: K, elseValue: V): V = {
- val pos = keySet.getPos(k)
+ val pos = _keySet.getPos(k)
if (pos >= 0) _values(pos) else elseValue
}
/** Set the value for a key */
def update(k: K, v: V) {
- val pos = keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
+ val pos = _keySet.addWithoutResize(k) & OpenHashSet.POSITION_MASK
_values(pos) = v
- keySet.rehashIfNeeded(k, grow, move)
+ _keySet.rehashIfNeeded(k, grow, move)
_oldValues = null
}
-
- /** Set the value for a key */
- def setMerge(k: K, v: V, mergeF: (V, V) => V) {
- val pos = keySet.addWithoutResize(k)
- val ind = pos & OpenHashSet.POSITION_MASK
- if ((pos & OpenHashSet.NONEXISTENCE_MASK) != 0) { // if first add
- _values(ind) = v
- } else {
- _values(ind) = mergeF(_values(ind), v)
- }
- keySet.rehashIfNeeded(k, grow, move)
- _oldValues = null
- }
-
-
/**
* If the key doesn't exist yet in the hash map, set its value to defaultValue; otherwise,
* set its value to mergeValue(oldValue).
@@ -98,11 +75,11 @@ class PrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
* @return the newly updated value.
*/
def changeValue(k: K, defaultValue: => V, mergeValue: (V) => V): V = {
- val pos = keySet.addWithoutResize(k)
+ val pos = _keySet.addWithoutResize(k)
if ((pos & OpenHashSet.NONEXISTENCE_MASK) != 0) {
val newValue = defaultValue
_values(pos & OpenHashSet.POSITION_MASK) = newValue
- keySet.rehashIfNeeded(k, grow, move)
+ _keySet.rehashIfNeeded(k, grow, move)
newValue
} else {
_values(pos) = mergeValue(_values(pos))
@@ -116,9 +93,9 @@ class PrimitiveKeyOpenHashMap[@specialized(Long, Int) K: ClassTag,
/** Get the next value we should return from next(), or null if we're finished iterating */
def computeNextPair(): (K, V) = {
- pos = keySet.nextPos(pos)
+ pos = _keySet.nextPos(pos)
if (pos >= 0) {
- val ret = (keySet.getValue(pos), _values(pos))
+ val ret = (_keySet.getValue(pos), _values(pos))
pos += 1
ret
} else {
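
With this revert, both maps are built only from an initial capacity (default 64) and manage their OpenHashSet internally. A short usage sketch for the primitive-keyed variant, again assuming Spark-internal code; the degrees map is illustrative:

    import org.apache.spark.util.collection.PrimitiveKeyOpenHashMap

    // Keys must be Int or Long, enforced by the require on classTag[K].
    val degrees = new PrimitiveKeyOpenHashMap[Long, Int](128)
    degrees(42L) = 1                       // plain update
    degrees.changeValue(42L, 1, _ + 1)     // merge-style increment of an existing key
    assert(degrees.getOrElse(42L, 0) == 2)
    assert(degrees.getOrElse(7L, 0) == 0)  // absent key falls back to elseValue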