summaryrefslogtreecommitdiff
path: root/src/library/scala/collection/mutable/FlatHashTable.scala
diff options
context:
space:
mode:
authorPaul Phillips <paulp@improving.org>2009-12-01 18:28:55 +0000
committerPaul Phillips <paulp@improving.org>2009-12-01 18:28:55 +0000
commita3bf3f136caaefa98268607a3529b7554df5fc80 (patch)
tree220322df1130f864af06661baca1b7f4434ee32d /src/library/scala/collection/mutable/FlatHashTable.scala
parentc2359ccec521ed24641fb010774e7b39b4ae62b2 (diff)
downloadscala-a3bf3f136caaefa98268607a3529b7554df5fc80.tar.gz
scala-a3bf3f136caaefa98268607a3529b7554df5fc80.tar.bz2
scala-a3bf3f136caaefa98268607a3529b7554df5fc80.zip
[This patch submitted by ismael juma - commit m...
[This patch submitted by ismael juma - commit message his words, but condensed.] Fix ticket #1600: Serialization and deserialization of hash-based collections should not re-use hashCode. The collection is rebuilt on deserialization - note that this is not compatible with the previous serialization format. All @SerialVersionUIDs have been reset to 1. WeakHashMap is not Serializable and should not be so. TreeHashMap has not been reintegrated yet. OpenHashMap has not been updated. (I think this collection is flawed and should be removed or reimplemented.)
Diffstat (limited to 'src/library/scala/collection/mutable/FlatHashTable.scala')
-rw-r--r-- src/library/scala/collection/mutable/FlatHashTable.scala | 55
1 file changed, 50 insertions(+), 5 deletions(-)
diff --git a/src/library/scala/collection/mutable/FlatHashTable.scala b/src/library/scala/collection/mutable/FlatHashTable.scala
index 1d55933050..d06ead7888 100644
--- a/src/library/scala/collection/mutable/FlatHashTable.scala
+++ b/src/library/scala/collection/mutable/FlatHashTable.scala
@@ -27,18 +27,63 @@ trait FlatHashTable[A] {
private final val tableDebug = false
+ @transient private[collection] var _loadFactor = loadFactor
+
/** The actual hash table.
*/
- protected var table: Array[AnyRef] =
- if (initialSize == 0) null else new Array(initialSize)
+ @transient protected var table: Array[AnyRef] = new Array(initialCapacity)
/** The number of mappings contained in this hash table.
*/
- protected var tableSize = 0
+ @transient protected var tableSize = 0
/** The next size value at which to resize (capacity * load factor).
*/
- protected var threshold: Int = newThreshold(initialSize)
+ @transient protected var threshold: Int = newThreshold(initialCapacity)
+
+ import HashTable.powerOfTwo
+ private def capacity(expectedSize: Int) = if (expectedSize == 0) 1 else powerOfTwo(expectedSize)
+ private def initialCapacity = capacity(initialSize)
+
+ /**
+ * Initialises the collection from the input stream. `f` will be called for each element
+ * read from the input stream in the order determined by the stream. This is useful for
+ * structures where iteration order is important (e.g. LinkedHashSet).
+ *
+ * The serialization format expected is the one produced by `serializeTo`.
+ */
+ private[collection] def init(in: java.io.ObjectInputStream, f: A => Unit) {
+ in.defaultReadObject
+
+ _loadFactor = in.readInt
+ assert(_loadFactor > 0)
+
+ val size = in.readInt
+ assert(size >= 0)
+
+ table = new Array(capacity(size * loadFactorDenum / _loadFactor))
+ threshold = newThreshold(table.size)
+
+ var index = 0
+ while (index < size) {
+ val elem = in.readObject.asInstanceOf[A]
+ f(elem)
+ addEntry(elem)
+ index += 1
+ }
+ }
+
+ /**
+ * Serializes the collection to the output stream by saving the load factor, collection
+ * size and collection elements. `foreach` determines the order in which the elements are saved
+ * to the stream. To deserialize, `init` should be used.
+ */
+ private[collection] def serializeTo(out: java.io.ObjectOutputStream) {
+ out.defaultWriteObject
+ out.writeInt(_loadFactor)
+ out.writeInt(tableSize)
+ iterator.foreach(out.writeObject)
+ }
def findEntry(elem: A): Option[A] = {
var h = index(elemHashCode(elem))
@@ -154,7 +199,7 @@ trait FlatHashTable[A] {
protected final def index(hcode: Int) = improve(hcode) & (table.length - 1)
private def newThreshold(size: Int) = {
- val lf = loadFactor
+ val lf = _loadFactor
assert(lf < (loadFactorDenum / 2), "loadFactor too large; must be < 0.5")
(size.toLong * lf / loadFactorDenum ).toInt
}