author    Pavel Pavlov <pavel.e.pavlov@gmail.com>    2012-09-18 13:01:02 +0700
committer Pavel Pavlov <pavel.e.pavlov@gmail.com>    2012-09-18 13:06:19 +0700
commit    dbd641f592310d7243f675e27ecc8b9d47684309 (patch)
tree      cc05c16fcd7dc74362b72d1f9b073206a87194da
parent    c761272a82c625f1255302c42c7cd14ce2cc3d78 (diff)
download  scala-dbd641f592310d7243f675e27ecc8b9d47684309.tar.gz
          scala-dbd641f592310d7243f675e27ecc8b9d47684309.tar.bz2
          scala-dbd641f592310d7243f675e27ecc8b9d47684309.zip
pull request feedback
-rw-r--r--  src/library/scala/collection/mutable/HashMap.scala             |  7
-rw-r--r--  src/library/scala/collection/mutable/HashSet.scala             |  6
-rw-r--r--  src/library/scala/collection/mutable/HashTable.scala           |  8
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashMap.scala       |  4
-rw-r--r--  src/library/scala/collection/mutable/LinkedHashSet.scala       |  6
-rw-r--r--  src/library/scala/collection/parallel/mutable/ParHashMap.scala | 22
-rw-r--r--  test/files/jvm/serialization-new.check                         | 24
-rw-r--r--  test/files/jvm/serialization-new.scala                         | 24
-rw-r--r--  test/files/jvm/serialization.check                             | 24
-rw-r--r--  test/files/jvm/serialization.scala                             | 24
-rw-r--r--  test/files/run/collections.check                               |  8
-rw-r--r--  test/files/run/collections.scala                               |  2
-rw-r--r--  test/files/run/colltest.check                                  |  1
-rw-r--r--  test/files/run/colltest.scala                                  |  3
-rw-r--r--  test/files/run/colltest1.check                                 |  2
-rw-r--r--  test/files/run/colltest1.scala                                 |  2
16 files changed, 139 insertions, 28 deletions
diff --git a/src/library/scala/collection/mutable/HashMap.scala b/src/library/scala/collection/mutable/HashMap.scala
index a407fa508c..7fc0438350 100644
--- a/src/library/scala/collection/mutable/HashMap.scala
+++ b/src/library/scala/collection/mutable/HashMap.scala
@@ -49,7 +49,7 @@ extends AbstractMap[A, B]
type Entry = DefaultEntry[A, B]
override def empty: HashMap[A, B] = HashMap.empty[A, B]
- override def clear() = clearTable()
+ override def clear() { clearTable() }
override def size: Int = tableSize
def this() = this(null)
@@ -57,7 +57,8 @@ extends AbstractMap[A, B]
override def par = new ParHashMap[A, B](hashTableContents)
// contains and apply overridden to avoid option allocations.
- override def contains(key: A) = findEntry(key) != null
+ override def contains(key: A): Boolean = findEntry(key) != null
+
override def apply(key: A): B = {
val result = findEntry(key)
if (result eq null) default(key)
@@ -126,7 +127,7 @@ extends AbstractMap[A, B]
if (!isSizeMapDefined) sizeMapInitAndRebuild
} else sizeMapDisable
- protected override def createNewEntry[B1](key: A, value: B1): Entry = {
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
new Entry(key, value.asInstanceOf[B])
}
diff --git a/src/library/scala/collection/mutable/HashSet.scala b/src/library/scala/collection/mutable/HashSet.scala
index e7ca7284dc..f480260fac 100644
--- a/src/library/scala/collection/mutable/HashSet.scala
+++ b/src/library/scala/collection/mutable/HashSet.scala
@@ -53,7 +53,7 @@ extends AbstractSet[A]
override def companion: GenericCompanion[HashSet] = HashSet
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = containsEntry(elem)
@@ -67,9 +67,9 @@ extends AbstractSet[A]
override def remove(elem: A): Boolean = removeEntry(elem).isDefined
- override def clear() = clearTable()
+ override def clear() { clearTable() }
- override def iterator = super[FlatHashTable].iterator
+ override def iterator: Iterator[A] = super[FlatHashTable].iterator
override def foreach[U](f: A => U) {
var i = 0
diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala
index 4a660a7e0a..e790181e5d 100644
--- a/src/library/scala/collection/mutable/HashTable.scala
+++ b/src/library/scala/collection/mutable/HashTable.scala
@@ -155,7 +155,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
* May be somewhat faster then `findEntry`/`addEntry` pair as it
* computes entry's hash index only once.
* Returns entry found in table or null.
- * Method `createNewEntry` must be implemented in any subclass/subtrait who calls this.
+ * New entries are created by calling `createNewEntry` method.
*/
protected def findOrAddEntry[B](key: A, value: B): Entry = {
val h = index(elemHashCode(key))
@@ -164,10 +164,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU
}
/** Creates new entry to be immediately inserted into the hashtable.
- * This method must be implemented for any class who calls `findOrAddEntry`.
- * Ideally, it should be abstract, but this will break source compatibility.
+ * This method is guaranteed to be called only once and in case that the entry
+ * will be added. In other words, an implementation may be side-effecting.
*/
- protected def createNewEntry[B](key: A, value: B): Entry = ???
+ protected def createNewEntry[B](key: A, value: B): Entry
/** Remove entry from table if present.
*/
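
The HashTable.scala hunk above turns `createNewEntry` from a stubbed-out default into an abstract member, and documents that `findOrAddEntry` calls it at most once and only when the key is absent. The following is a minimal standalone sketch of that contract, with a single linked bucket standing in for the real hashed table; the names mirror the library, but this is not the actual scala.collection.mutable.HashTable code.

    // Sketch of the findOrAddEntry/createNewEntry contract (illustrative only).
    object FindOrAddEntrySketch {

      final class Entry[A, B](val key: A, var value: B) {
        var next: Entry[A, B] = null
      }

      abstract class SimpleTable[A, B] {
        protected var first: Entry[A, B] = null

        // Subclasses decide how entries are built (LinkedHashMap, for example,
        // also links the new entry into its insertion-order list). Called at
        // most once per findOrAddEntry, and only when the key is absent, so
        // side effects are safe.
        protected def createNewEntry(key: A, value: B): Entry[A, B]

        // Single scan: returns the existing entry, or null after inserting a fresh one.
        protected def findOrAddEntry(key: A, value: B): Entry[A, B] = {
          var e = first
          while ((e ne null) && e.key != key) e = e.next
          if (e eq null) {
            val fresh = createNewEntry(key, value)
            fresh.next = first
            first = fresh
            null
          } else e
        }

        // put in terms of one lookup, as in the HashMap and ParHashMap hunks.
        def put(key: A, value: B): Option[B] = {
          val e = findOrAddEntry(key, value)
          if (e eq null) None
          else { val old = e.value; e.value = value; Some(old) }
        }
      }

      // A plain subclass only has to allocate; there is nothing else to hook in.
      class PlainTable[A, B] extends SimpleTable[A, B] {
        protected def createNewEntry(key: A, value: B) = new Entry(key, value)
      }

      def main(args: Array[String]) {
        val t = new PlainTable[String, Int]
        println(t.put("scala", 1)) // None: key was absent, createNewEntry ran exactly once
        println(t.put("scala", 2)) // Some(1): existing entry updated, no new allocation
      }
    }

Subclasses such as LinkedHashMap and LinkedHashSet hook their insertion-order bookkeeping into `createNewEntry`, which is why the once-per-insertion guarantee spelled out in the new doc comment matters.
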
diff --git a/src/library/scala/collection/mutable/LinkedHashMap.scala b/src/library/scala/collection/mutable/LinkedHashMap.scala
index 6603447810..5028884a8e 100644
--- a/src/library/scala/collection/mutable/LinkedHashMap.scala
+++ b/src/library/scala/collection/mutable/LinkedHashMap.scala
@@ -129,7 +129,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
else Iterator.empty.next
}
- override def foreach[U](f: ((A, B)) => U) = {
+ override def foreach[U](f: ((A, B)) => U) {
var cur = firstEntry
while (cur ne null) {
f((cur.key, cur.value))
@@ -145,7 +145,7 @@ class LinkedHashMap[A, B] extends AbstractMap[A, B]
}
}
- protected override def createNewEntry[B1](key: A, value: B1): Entry = {
+ protected def createNewEntry[B1](key: A, value: B1): Entry = {
val e = new Entry(key, value.asInstanceOf[B])
if (firstEntry eq null) firstEntry = e
else { lastEntry.later = e; e.earlier = lastEntry }
diff --git a/src/library/scala/collection/mutable/LinkedHashSet.scala b/src/library/scala/collection/mutable/LinkedHashSet.scala
index 05e233a784..88bad5ff9b 100644
--- a/src/library/scala/collection/mutable/LinkedHashSet.scala
+++ b/src/library/scala/collection/mutable/LinkedHashSet.scala
@@ -54,7 +54,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
@transient protected var firstEntry: Entry = null
@transient protected var lastEntry: Entry = null
- override def size = tableSize
+ override def size: Int = tableSize
def contains(elem: A): Boolean = findEntry(elem) ne null
@@ -83,7 +83,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
else Iterator.empty.next
}
- override def foreach[U](f: A => U) = {
+ override def foreach[U](f: A => U) {
var cur = firstEntry
while (cur ne null) {
f(cur.key)
@@ -99,7 +99,7 @@ class LinkedHashSet[A] extends AbstractSet[A]
}
}
- protected override def createNewEntry[B](key: A, dummy: B): Entry = {
+ protected def createNewEntry[B](key: A, dummy: B): Entry = {
val e = new Entry(key)
if (firstEntry eq null) firstEntry = e
else { lastEntry.later = e; e.earlier = lastEntry }
diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
index 2ed07c81a3..724b11ac47 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala
@@ -67,13 +67,13 @@ self =>
def get(key: K): Option[V] = {
val e = findEntry(key)
- if (e == null) None
+ if (e eq null) None
else Some(e.value)
}
def put(key: K, value: V): Option[V] = {
- val e = findEntry(key)
- if (e == null) { addEntry(new Entry(key, value)); None }
+ val e = findOrAddEntry(key, value)
+ if (e eq null) None
else { val v = e.value; e.value = value; Some(v) }
}
@@ -86,9 +86,8 @@ self =>
}
def += (kv: (K, V)): this.type = {
- val e = findEntry(kv._1)
- if (e == null) addEntry(new Entry(kv._1, kv._2))
- else e.value = kv._2
+ val e = findOrAddEntry(kv._1, kv._2)
+ if (e ne null) e.value = kv._2
this
}
@@ -103,6 +102,10 @@ self =>
new ParHashMapIterator(idxFrom, idxUntil, totalSz, es)
}
+ protected def createNewEntry[V1](key: K, value: V1): Entry = {
+ new Entry(key, value.asInstanceOf[V])
+ }
+
private def writeObject(out: java.io.ObjectOutputStream) {
serializeTo(out, { entry =>
out.writeObject(entry.key)
@@ -111,7 +114,7 @@ self =>
}
private def readObject(in: java.io.ObjectInputStream) {
- init(in, new Entry(in.readObject().asInstanceOf[K], in.readObject().asInstanceOf[V]))
+ init(in, createNewEntry(in.readObject().asInstanceOf[K], in.readObject()))
}
private[parallel] override def brokenInvariants = {
@@ -193,7 +196,9 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
// construct a normal table and fill it sequentially
// TODO parallelize by keeping separate sizemaps and merging them
object table extends HashTable[K, DefaultEntry[K, V]] {
- def insertEntry(e: DefaultEntry[K, V]) = if (super.findEntry(e.key) eq null) super.addEntry(e)
+ type Entry = DefaultEntry[K, V]
+ def insertEntry(e: Entry) { super.findOrAddEntry(e.key, e) }
+ def createNewEntry[E](key: K, entry: E): Entry = entry.asInstanceOf[Entry]
sizeMapInit(table.length)
}
var i = 0
@@ -254,6 +259,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr
assert(h >= block * blocksize && h < (block + 1) * blocksize)
}
}
+ protected def createNewEntry[X](key: K, x: X) = ???
}
/* tasks */
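
In the ParHashMapCombiner hunk above, `insertEntry` now goes through `findOrAddEntry`, passing the already-built entry as the polymorphic value and letting `createNewEntry` hand it straight back: a pre-built entry is inserted only if its key is not yet present, and nothing is re-allocated. A self-contained sketch of that idiom follows; the table is again a simplified single bucket and the names are illustrative, not the real combiner internals.

    // Sketch of the entry-reuse idiom: the value parameter of findOrAddEntry is
    // polymorphic, so an already-built entry can travel through it and be handed
    // back unchanged by createNewEntry (illustrative code, not the real combiner).
    object EntryReuseSketch {

      final class DefaultEntry[K, V](val key: K, var value: V) {
        var next: DefaultEntry[K, V] = null
      }

      class ReusingTable[K, V] {
        type Entry = DefaultEntry[K, V]
        private var first: Entry = null

        // Mirrors the combiner's createNewEntry[E]: the "value" is the entry itself.
        protected def createNewEntry[E](key: K, value: E): Entry =
          value.asInstanceOf[Entry]

        // Simplified single-bucket findOrAddEntry: inserts only if the key is absent.
        protected def findOrAddEntry[E](key: K, value: E): Entry = {
          var e = first
          while ((e ne null) && e.key != key) e = e.next
          if (e eq null) {
            val fresh = createNewEntry(key, value)
            fresh.next = first
            first = fresh
            null
          } else e
        }

        // Combiner-style insert: keep the first entry seen per key, drop duplicates.
        def insertEntry(e: Entry) { findOrAddEntry(e.key, e) }

        def entries: List[(K, V)] = {
          var acc = List.empty[(K, V)]
          var e = first
          while (e ne null) { acc = (e.key, e.value) :: acc; e = e.next }
          acc
        }
      }

      def main(args: Array[String]) {
        val t = new ReusingTable[String, Int]
        t.insertEntry(new DefaultEntry("a", 1))
        t.insertEntry(new DefaultEntry("a", 2)) // duplicate key: dropped, first entry kept
        t.insertEntry(new DefaultEntry("b", 3))
        println(t.entries) // List((a,1), (b,3)) in this sketch's traversal order
      }
    }

The `createNewEntry[X](key: K, x: X) = ???` added to the combiner class itself only satisfies the now-abstract member; the sequential-build `object table` above it provides the implementation that actually runs.
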
diff --git a/test/files/jvm/serialization-new.check b/test/files/jvm/serialization-new.check
index fa51c6a879..f886cfe29c 100644
--- a/test/files/jvm/serialization-new.check
+++ b/test/files/jvm/serialization-new.check
@@ -168,6 +168,30 @@ x = History()
y = History()
x equals y: true, y equals x: true
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
x = ListBuffer(white, black)
y = ListBuffer(white, black)
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization-new.scala b/test/files/jvm/serialization-new.scala
index 91eb52928f..1522fc8e27 100644
--- a/test/files/jvm/serialization-new.scala
+++ b/test/files/jvm/serialization-new.scala
@@ -285,8 +285,8 @@ object Test3_mutable {
import scala.reflect.ClassTag
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
- HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet}
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
import scala.collection.concurrent.TrieMap
// in alphabetic order
@@ -346,6 +346,26 @@ object Test3_mutable {
val h1 = new History[String, Int]
val _h1: History[String, Int] = read(write(h1))
check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
/*
// LinkedList
val ll1 = new LinkedList[Int](2, null)
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check
index fa51c6a879..f886cfe29c 100644
--- a/test/files/jvm/serialization.check
+++ b/test/files/jvm/serialization.check
@@ -168,6 +168,30 @@ x = History()
y = History()
x equals y: true, y equals x: true
+x = Map(Linked -> 1, Hash -> 2, Map -> 3)
+y = Map(Linked -> 1, Hash -> 2, Map -> 3)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = ArrayBuffer((Linked,1), (Hash,2), (Map,3))
+y = List((Linked,1), (Hash,2), (Map,3))
+x equals y: true, y equals x: true
+
+x = Set(layers, buffers, title)
+y = Set(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = ArrayBuffer(layers, buffers, title)
+x equals y: true, y equals x: true
+
+x = ArrayBuffer(layers, buffers, title)
+y = List(layers, buffers, title)
+x equals y: true, y equals x: true
+
x = ListBuffer(white, black)
y = ListBuffer(white, black)
x equals y: true, y equals x: true
diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala
index 9c2f2acdbf..34b64938b4 100644
--- a/test/files/jvm/serialization.scala
+++ b/test/files/jvm/serialization.scala
@@ -285,8 +285,8 @@ object Test3_mutable {
import scala.reflect.ClassManifest
import scala.collection.mutable.{
ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList,
- HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue,
- Stack, StringBuilder, WrappedArray, TreeSet}
+ HashMap, HashSet, History, LinkedHashMap, LinkedHashSet, LinkedList, ListBuffer,
+ Publisher, Queue, Stack, StringBuilder, WrappedArray, TreeSet}
import scala.collection.concurrent.TrieMap
// in alphabetic order
@@ -346,6 +346,26 @@ object Test3_mutable {
val h1 = new History[String, Int]
val _h1: History[String, Int] = read(write(h1))
check(h1, _h1)
+
+ // LinkedHashMap
+ { val lhm1 = new LinkedHashMap[String, Int]
+ val list = List(("Linked", 1), ("Hash", 2), ("Map", 3))
+ lhm1 ++= list.iterator
+ val _lhm1: LinkedHashMap[String, Int] = read(write(lhm1))
+ check(lhm1, _lhm1)
+ check(lhm1.toSeq, _lhm1.toSeq) // check elements order
+ check(lhm1.toSeq, list) // check elements order
+ }
+
+ // LinkedHashSet
+ { val lhs1 = new LinkedHashSet[String]
+ val list = List("layers", "buffers", "title")
+ lhs1 ++= list.iterator
+ val _lhs1: LinkedHashSet[String] = read(write(lhs1))
+ check(lhs1, _lhs1)
+ check(lhs1.toSeq, _lhs1.toSeq) // check elements order
+ check(lhs1.toSeq, list) // check elements order
+ }
/*
// LinkedList
val ll1 = new LinkedList[Int](2, null)
diff --git a/test/files/run/collections.check b/test/files/run/collections.check
index b87a5998c5..c24150b24d 100644
--- a/test/files/run/collections.check
+++ b/test/files/run/collections.check
@@ -2,6 +2,10 @@
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashSet:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Set:
test1: 14005
test2: 25005003, iters = 5000
@@ -18,6 +22,10 @@ test3: 25005003
test1: 14005
test2: 25005003, iters = 5000
test3: 25005003
+***** mutable.LinkedHashMap:
+test1: 14005
+test2: 25005003, iters = 5000
+test3: 25005003
***** immutable.Map:
test1: 14005
test2: 25005003, iters = 5000
diff --git a/test/files/run/collections.scala b/test/files/run/collections.scala
index 60f0765e6a..69c40fae80 100644
--- a/test/files/run/collections.scala
+++ b/test/files/run/collections.scala
@@ -106,10 +106,12 @@ object Test extends App {
}
test("mutable.HashSet", new mutable.HashSet[Int], 5000)
+ test("mutable.LinkedHashSet", new mutable.LinkedHashSet[Int], 5000)
test("immutable.Set", immutable.Set[Int](), 5000)
test("immutable.ListSet", new immutable.ListSet[Int], 5000)
test("immutable.TreeSet", new immutable.TreeSet[Int], 5000)
test("mutable.HashMap", new mutable.HashMap[Int, Int], 5000)
+ test("mutable.LinkedHashMap", new mutable.LinkedHashMap[Int, Int], 5000)
test("immutable.Map", immutable.Map[Int, Int](), 5000)
test("immutable.TreeMap", new immutable.TreeMap[Int, Int], 5000)
test("immutable.ListMap", new immutable.ListMap[Int, Int], 3000)
diff --git a/test/files/run/colltest.check b/test/files/run/colltest.check
index 1ad81a1350..e5bb013ed7 100644
--- a/test/files/run/colltest.check
+++ b/test/files/run/colltest.check
@@ -5,3 +5,4 @@ false
true
false
succeeded for 10 iterations.
+succeeded for 10 iterations.
diff --git a/test/files/run/colltest.scala b/test/files/run/colltest.scala
index ecd234bdd1..703e94a3c7 100644
--- a/test/files/run/colltest.scala
+++ b/test/files/run/colltest.scala
@@ -61,5 +61,6 @@ object Test extends App {
}
t3954
- new TestSet(HashSet.empty, new scala.collection.mutable.LinkedHashSet)
+ new TestSet(HashSet.empty, new LinkedHashSet)
+ new TestSet(new ImmutableSetAdaptor(collection.immutable.Set.empty[Int]), new LinkedHashSet)
}
diff --git a/test/files/run/colltest1.check b/test/files/run/colltest1.check
index 7377174281..5ec6286d9e 100644
--- a/test/files/run/colltest1.check
+++ b/test/files/run/colltest1.check
@@ -107,3 +107,5 @@ List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K)
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
+List((A,A), (B,B), (C,C), (D,D), (E,E), (F,F), (G,G), (H,H), (I,I), (J,J), (K,K), (L,L), (M,M), (N,N), (O,O), (P,P), (Q,Q), (R,R), (S,S), (T,T), (U,U), (V,V), (W,W), (X,X), (Y,Y), (Z,Z))
diff --git a/test/files/run/colltest1.scala b/test/files/run/colltest1.scala
index 1cbd932222..54adeb7cda 100644
--- a/test/files/run/colltest1.scala
+++ b/test/files/run/colltest1.scala
@@ -226,6 +226,7 @@ object Test extends App {
setTest(mutable.Set())
setTest(immutable.Set())
setTest(mutable.HashSet())
+ setTest(mutable.LinkedHashSet())
setTest(immutable.HashSet())
mapTest(Map())
@@ -233,5 +234,6 @@ object Test extends App {
mapTest(immutable.Map())
mapTest(immutable.TreeMap())
mutableMapTest(mutable.HashMap())
+ mutableMapTest(mutable.LinkedHashMap())
mapTest(immutable.HashMap())
}