From dbd641f592310d7243f675e27ecc8b9d47684309 Mon Sep 17 00:00:00 2001
From: Pavel Pavlov
Date: Tue, 18 Sep 2012 13:01:02 +0700
Subject: pull request feedback

---
 src/library/scala/collection/mutable/HashMap.scala   |  7 ++++---
 src/library/scala/collection/mutable/HashSet.scala   |  6 +++---
 .../scala/collection/mutable/HashTable.scala         |  8 ++++----
 .../scala/collection/mutable/LinkedHashMap.scala     |  4 ++--
 .../scala/collection/mutable/LinkedHashSet.scala     |  6 +++---
 .../collection/parallel/mutable/ParHashMap.scala     | 22 ++++++++++++++--------
 6 files changed, 30 insertions(+), 23 deletions(-)

diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index a407fa508c..7fc0438350 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
   type Entry = DefaultEntry[A, B]
 
   override def empty: HashMap[A, B] = HashMap.empty[A, B]
-  override def clear() = clearTable()
+  override def clear() { clearTable() }
   override def size: Int = tableSize
 
   def this() = this(null)
@@ -57,7 +57,8 @@ extends AbstractMap[A, B]
   override def par = new ParHashMap[A, B](hashTableContents)
 
   // contains and apply overridden to avoid option allocations.
-  override def contains(key: A) = findEntry(key) != null
+  override def contains(key: A): Boolean = findEntry(key) != null
+
   override def apply(key: A): B = {
     val result = findEntry(key)
     if (result eq null) default(key)
@@ -126,7 +127,7 @@ extends AbstractMap[A, B]
     if (!isSizeMapDefined) sizeMapInitAndRebuild
   } else sizeMapDisable
 
-  protected override def createNewEntry[B1](key: A, value: B1): Entry = {
+  protected def createNewEntry[B1](key: A, value: B1): Entry = {
     new Entry(key, value.asInstanceOf[B])
   }
 
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index e7ca7284dc..f480260fac 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -53,7 +53,7 @@ extends AbstractSet[A]
 
   override def companion: GenericCompanion[HashSet] = HashSet
 
-  override def size = tableSize
+  override def size: Int = tableSize
 
   def contains(elem: A): Boolean = containsEntry(elem)
 
@@ -67,9 +67,9 @@ extends AbstractSet[A]
 
   override def remove(elem: A): Boolean = removeEntry(elem).isDefined
 
-  override def clear() = clearTable()
+  override def clear() { clearTable() }
 
-  override def iterator = super[FlatHashTable].iterator
+  override def iterator: Iterator[A] = super[FlatHashTable].iterator
 
   override def foreach[U](f: A => U) {
     var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 4a660a7e0a..e790181e5d 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -155,7 +155,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
    * May be somewhat faster than `findEntry`/`addEntry` pair as it
    * computes entry's hash index only once.
    * Returns entry found in table or null.
-   * Method `createNewEntry` must be implemented in any subclass/subtrait who calls this.
+   * New entries are created by calling the `createNewEntry` method.
    */
   protected def findOrAddEntry[B](key: A, value: B): Entry = {
     val h = index(elemHashCode(key))
@@ -164,10 +164,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
   }
 
   /** Creates new entry to be immediately inserted into the hashtable.
-   * This method must be implemented for any class who calls `findOrAddEntry`.
-   * Ideally, it should be abstract, but this will break source compatibility.
+   * This method is guaranteed to be called at most once, and only if the entry
+   * is actually added. In other words, an implementation may have side effects.
    */
-  protected def createNewEntry[B](key: A, value: B): Entry = ???
+  protected def createNewEntry[B](key: A, value: B): Entry
 
   /** Remove entry from table if present.
    */
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 6603447810..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -129,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
     else Iterator.empty.next
   }
 
-  override def foreach[U](f: ((A, B)) => U) = {
+  override def foreach[U](f: ((A, B)) => U) {
     var cur = firstEntry
     while (cur ne null) {
       f((cur.key, cur.value))
@@ -145,7 +145,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
     }
   }
 
-  protected override def createNewEntry[B1](key: A, value: B1): Entry = {
+  protected def createNewEntry[B1](key: A, value: B1): Entry = {
     val e = new Entry(key, value.asInstanceOf[B])
     if (firstEntry eq null) firstEntry = e
     else { lastEntry.later = e; e.earlier = lastEntry }
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 05e233a784..88bad5ff9b 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -54,7 +54,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
   @transient protected var firstEntry: Entry = null
   @transient protected var lastEntry: Entry = null
 
-  override def size = tableSize
+  override def size: Int = tableSize
 
   def contains(elem: A): Boolean = findEntry(elem) ne null
 
@@ -83,7 +83,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
     else Iterator.empty.next
   }
 
-  override def foreach[U](f: A => U) = {
+  override def foreach[U](f: A => U) {
     var cur = firstEntry
     while (cur ne null) {
       f(cur.key)
@@ -99,7 +99,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
     }
   }
 
-  protected override def createNewEntry[B](key: A, dummy: B): Entry = {
+  protected def createNewEntry[B](key: A, dummy: B): Entry = {
     val e = new Entry(key)
     if (firstEntry eq null) firstEntry = e
     else { lastEntry.later = e; e.earlier = lastEntry }
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 2ed07c81a3..724b11ac47 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -67,13 +67,13 @@ self =>
 
   def get(key: K): Option[V] = {
     val e = findEntry(key)
-    if (e == null) None
+    if (e eq null) None
     else Some(e.value)
   }
 
   def put(key: K, value: V): Option[V] = {
-    val e = findEntry(key)
-    if (e == null) { addEntry(new Entry(key, value)); None }
+    val e = findOrAddEntry(key, value)
+    if (e eq null) None
     else { val v = e.value; e.value = value; Some(v) }
   }
 
@@ -86,9 +86,8 @@ self =>
   }
 
   def += (kv: (K, V)): this.type = {
-    val e = findEntry(kv._1)
-    if (e == null) addEntry(new Entry(kv._1, kv._2))
-    else e.value = kv._2
+    val e = findOrAddEntry(kv._1, kv._2)
+    if (e ne null) e.value = kv._2
     this
   }
 
@@ -103,6 +102,10 @@ self =>
     new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
   }
 
+  protected def createNewEntry[V1](key: K, value: V1): Entry = {
+    new Entry(key, value.asInstanceOf[V])
+  }
+
   private def writeObject(out: java.io.ObjectOutputStream) {
     serializeTo(out, { entry =>
       out.writeObject(entry.key)
@@ -111,7 +114,7 @@ self =>
   }
 
   private def readObject(in: java.io.ObjectInputStream) {
-    init(in, new Entry(in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V]))
+    init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
   }
 
   private[parallel] override def brokenInvariants = {
@@ -193,7 +196,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
     // construct a normal table and fill it sequentially
     // TODO parallelize by keeping separate sizemaps and merging them
     object table extends HashTable[K, DefaultEntry[K, V]] {
-      def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+      type Entry = DefaultEntry[K, V]
+      def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+      def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
       sizeMapInit(table.length)
     }
     var i = 0
@@ -254,6 +259,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
         assert(h >= block * blocksize && h < (block + 1) * blocksize)
       }
     }
+    protected def createNewEntry[X](key: K, x: X) = ???
  }

  /* tasks */
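
The core of this change is the contract between `HashTable.findOrAddEntry` and the now-abstract `createNewEntry`: the hash index is computed once, and `createNewEntry` is invoked at most once, only when the key is genuinely absent. That guarantee is what lets `LinkedHashMap` and `LinkedHashSet` perform their list-linking side effects inside entry creation, and lets `ParHashMap.put` and `+=` replace the `findEntry`/`addEntry` pair with a single traversal. The Scala sketch below is a minimal, self-contained illustration of that pattern; `MiniEntry`, `MiniTable`, `OrderedMiniTable`, and their members are hypothetical names for this note, not the real `scala.collection.mutable.HashTable` (which also handles resizing, size maps, and serialization).

import scala.collection.mutable.ArrayBuffer

// Hypothetical stand-in for the library's HashEntry; illustration only.
class MiniEntry[K, V](val key: K, var value: V) {
  var next: MiniEntry[K, V] = null
}

// A minimal sketch of the findOrAddEntry/createNewEntry contract this
// patch establishes. Not the real HashTable trait: no resizing, no
// size maps, no serialization.
abstract class MiniTable[K, V](capacity: Int) {
  private val buckets = new Array[MiniEntry[K, V]](capacity)

  private def index(key: K): Int = (key.## & 0x7fffffff) % capacity

  // The hook the patch makes abstract: each concrete table decides how to
  // build an entry, and may safely have side effects, because findOrAddEntry
  // calls it at most once, and only when the key is actually inserted.
  protected def createNewEntry(key: K, value: V): MiniEntry[K, V]

  // Hashes the key once; returns the existing entry, or null after
  // inserting a freshly created one (same convention as the patch).
  def findOrAddEntry(key: K, value: V): MiniEntry[K, V] = {
    val h = index(key)
    var e = buckets(h)
    while ((e ne null) && e.key != key) e = e.next
    if (e eq null) {
      val fresh = createNewEntry(key, value)
      fresh.next = buckets(h)
      buckets(h) = fresh
    }
    e
  }

  // put follows the same single-traversal shape as ParHashMap.put after
  // this patch: no separate findEntry/addEntry pair.
  def put(key: K, value: V): Option[V] = {
    val e = findOrAddEntry(key, value)
    if (e eq null) None
    else { val old = e.value; e.value = value; Some(old) }
  }
}

// A side-effecting implementation in the spirit of LinkedHashMap: it
// records insertion order, which is only correct because createNewEntry
// is never called for a key that is already present.
class OrderedMiniTable[K, V](capacity: Int) extends MiniTable[K, V](capacity) {
  val insertionOrder = ArrayBuffer.empty[K]
  protected def createNewEntry(key: K, value: V): MiniEntry[K, V] = {
    insertionOrder += key
    new MiniEntry(key, value)
  }
}

object MiniTableDemo extends App {
  val t = new OrderedMiniTable[String, Int](16)
  println(t.put("a", 1))      // None: "a" was inserted
  println(t.put("a", 2))      // Some(1): updated, createNewEntry not called again
  println(t.insertionOrder)   // ArrayBuffer(a): each key recorded exactly once
}

Returning `null` rather than `Option` from the lookup path mirrors the patch's own design, the same reason `HashMap.contains` and `apply` are overridden above "to avoid option allocations".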