diff options
Diffstat (limited to 'src/library/scala/collection/immutable')
24 files changed, 1043 insertions, 475 deletions
diff --git a/src/library/scala/collection/immutable/DefaultMap.scala b/src/library/scala/collection/immutable/DefaultMap.scala new file mode 100755 index 0000000000..7ee8197150 --- /dev/null +++ b/src/library/scala/collection/immutable/DefaultMap.scala @@ -0,0 +1,53 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2010, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +// $Id: DefaultMap.scala 20028 2009-12-07 11:49:19Z cunei $ + + +package scala.collection +package immutable + +import generic._ + +/** <p> + * A default map which implements the <code>updated</code> and <code>-</code> + * methods of maps.<br/> + * Instances that inherit from <code>DefaultMap[A, B]</code> still have to + * define: + * </p><pre> + * <b>def</b> get(key: A): Option[B] + * <b>def</b> iterator: Iterator[(A, B)]</pre> + * <p> + * It refers back to the original map. + * </p> + * <p> + * It might also be advisable to override <code>foreach</code> or + * <code>size</code> if efficient implementations can be found. + * </p> + * + * @since 2.8 + */ +trait DefaultMap[A, +B] extends Map[A, B] { self => + + /** A default implementation which creates a new immutable map. + */ + override def +[B1 >: B](kv: (A, B1)): Map[A, B1] = { + val b = Map.newBuilder[A, B1] + b ++= this + b += ((kv._1, kv._2)) + b.result + } + + /** A default implementation which creates a new immutable map. 
+ */ + override def - (key: A): Map[A, B] = { + val b = newBuilder + b ++= this filter (key !=) + b.result + } +} diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 2215e22f71..e0f801546c 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -16,184 +16,383 @@ import generic._ import annotation.unchecked.uncheckedVariance /** <p> - * This class implements immutable maps using a hash table. - * </p> - * <p> - * It is optimized for sequential accesses where the last updated table is - * accessed most often. It supports with reasonable efficiency accesses to - * previous versions of the table by keeping a change log that's regularly - * compacted. It needs to synchronize most methods, so it is less suitable - * for highly concurrent accesses. + * This class implements immutable maps using a hash trie. * </p> * * @note the builder of a hash map returns specialized representations EmptyMap,Map1,..., Map4 * for maps of size <= 4. 
* * @author Martin Odersky - * @version 2.0, 19/01/2007 + * @author Tiark Rompf + * @version 2.8 * @since 2.3 */ -@serializable @SerialVersionUID(1L) -class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] with mutable.HashTable[A] { +@serializable @SerialVersionUID(2L) +class HashMap[A, +B] extends Map[A,B] with MapLike[A, B, HashMap[A, B]] { - type Entry = scala.collection.mutable.DefaultEntry[A, Any] - - @transient protected var later: HashMap[A, B @uncheckedVariance] = null - @transient protected var oldKey: A = _ - @transient protected var oldValue: Option[B @uncheckedVariance] = _ - @transient protected var deltaSize: Int = _ + override def size: Int = 0 override def empty = HashMap.empty[A, B] - def get(key: A): Option[B] = synchronized { - var m: HashMap[A, _ >: B] = this - var cnt = 0 - while (m.later != null) { - if (key == m.oldKey) return m.oldValue.asInstanceOf[Option[B]] - cnt += 1 - m = m.later - } - if (cnt > logLimit) makeCopy(m) - val e = m.findEntry(key) - if (e == null) None - else Some(getValue(e)) - } + def iterator: Iterator[(A,B)] = Iterator.empty - override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = synchronized { - makeCopyIfUpdated() - val e = findEntry(key) - if (e == null) { - markUpdated(key, None, 1) - later.addEntry(new Entry(key, value)) - } else { - markUpdated(key, Some(getValue(e)), 0) - e.value = value - } - later.asInstanceOf[HashMap[A, B1]] - } + override def foreach[U](f: ((A, B)) => U): Unit = { } + + def get(key: A): Option[B] = + get0(key, computeHash(key), 0) + + override def updated [B1 >: B] (key: A, value: B1): HashMap[A, B1] = + updated0(key, computeHash(key), 0, value, null) + + override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = + updated0(kv._1, computeHash(kv._1), 0, kv._2, kv) - /** Add a key/value pair to this map. 
- * @param kv the key/value pair - * @return A new map with the new binding added to this map - */ - override def + [B1 >: B] (kv: (A, B1)): HashMap[A, B1] = updated(kv._1, kv._2) - - /** Adds two or more elements to this collection and returns - * either the collection itself (if it is mutable), or a new collection - * with the added elements. - * - * @param elem1 the first element to add. - * @param elem2 the second element to add. - * @param elems the remaining elements to add. - */ override def + [B1 >: B] (elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *): HashMap[A, B1] = this + elem1 + elem2 ++ elems + // TODO: optimize (might be able to use mutable updates) - def - (key: A): HashMap[A, B] = synchronized { - makeCopyIfUpdated() - val e = findEntry(key) - if (e == null) this - else { - markUpdated(key, Some(getValue(e)), -1) - later removeEntry key - later.asInstanceOf[HashMap[A, B]] - } - } + def - (key: A): HashMap[A, B] = + removed0(key, computeHash(key), 0) - override def size: Int = synchronized { - var m: HashMap[A, _ >: B] = this - var cnt = 0 - var s = 0 - while (m.later != null) { - s -= m.deltaSize - cnt += 1 - m = m.later - } - s += m.tableSize - if (cnt > logLimit) makeCopy(m) - s - } + protected def elemHashCode(key: A) = if (key == null) 0 else key.hashCode() - def iterator = synchronized { - makeCopyIfUpdated() - entriesIterator map {e => (e.key, getValue(e))} + protected final def improve(hcode: Int) = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) } - private def getValue(e: Entry) = - e.value.asInstanceOf[B] - - private def logLimit: Int = math.sqrt(table.length).toInt - - private[this] def markUpdated(key: A, ov: Option[B], delta: Int) { - val lf = loadFactor - later = new HashMap[A, B] { - override def initialSize = 0 - /* We need to do this to avoid a reference to the outer HashMap */ - def _newLoadFactor = lf - override def loadFactor = _newLoadFactor - table = HashMap.this.table - 
tableSize = HashMap.this.tableSize - threshold = HashMap.this.threshold - } - oldKey = key - oldValue = ov - deltaSize = delta - } + protected def computeHash(key: A) = improve(elemHashCode(key)) - private def makeCopy(last: HashMap[A, _ >: B]) { - def undo(m: HashMap[A, _ >: B]) { - if (m ne last) { - undo(m.later) - if (m.deltaSize == 1) removeEntry(m.oldKey) - else if (m.deltaSize == 0) findEntry(m.oldKey).value = m.oldValue.get - else if (m.deltaSize == -1) addEntry(new Entry(m.oldKey, m.oldValue.get)) - } - } - def copy(e: Entry): Entry = - if (e == null) null - else { - val rest = copy(e.next) - val result = new Entry(e.key, e.value) - result.next = rest - result - } - val ltable = last.table - val s = ltable.length - table = new scala.Array[collection.mutable.HashEntry[A, Entry]](s) - var i = 0 - while (i < s) { - table(i) = copy(ltable(i).asInstanceOf[Entry]) - i += 1 - } - tableSize = last.tableSize - threshold = last.threshold - undo(this) - later = null - } + protected def get0(key: A, hash: Int, level: Int): Option[B] = None - private def makeCopyIfUpdated() { - var m: HashMap[A, _ >: B] = this - while (m.later != null) m = m.later - if (m ne this) makeCopy(m) - } + protected def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = + new HashMap.HashMap1(key, hash, value, kv) - private def writeObject(out: java.io.ObjectOutputStream) { - serializeTo(out, _.value) - } - private def readObject(in: java.io.ObjectInputStream) { - init[B](in, new Entry(_, _)) - } + + protected def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = this + } /** A factory object for immutable HashMaps. 
* * @author Martin Odersky + * @author Tiark Rompf * @version 2.8 * @since 2.3 */ object HashMap extends ImmutableMapFactory[HashMap] { implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), HashMap[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: HashMap[A, B] = new HashMap + def empty[A, B]: HashMap[A, B] = EmptyHashMap.asInstanceOf[HashMap[A, B]] + + private object EmptyHashMap extends HashMap[Any,Nothing] { + + } + + // TODO: add HashMap2, HashMap3, ... + + class HashMap1[A,+B](private var key: A, private[HashMap] var hash: Int, private var value: (B @uncheckedVariance), private var kv: (A,B @uncheckedVariance)) extends HashMap[A,B] { + override def size = 1 + + override def get0(key: A, hash: Int, level: Int): Option[B] = + if (hash == this.hash && key == this.key) Some(value) else None + + override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = + if (hash == this.hash && key == this.key) new HashMap1(key, hash, value, kv) + else { + if (hash != this.hash) { + //new HashTrieMap[A,B1](level+5, this, new HashMap1(key, hash, value, kv)) + val m = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) // TODO: could save array alloc + m.updated0(this.key, this.hash, level, this.value, this.kv).updated0(key, hash, level, value, kv) + } else { + // 32-bit hash collision (rare, but not impossible) + // wrap this in a HashTrieMap if called with level == 0 (otherwise serialization won't work) + if (level == 0) { + val elems = new Array[HashMap[A,B1]](1) + elems(0) = new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value)) + new HashTrieMap[A,B1](1 << ((hash >>> level) & 0x1f), elems, 2) + } else { + new HashMapCollision1(hash, ListMap.empty.updated(this.key,this.value).updated(key,value)) + } + } + } + + override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = + if (hash == this.hash && key == this.key) HashMap.empty[A,B] else this + + override def iterator: 
Iterator[(A,B)] = Iterator(ensurePair) + override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair) + private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv } + + private def writeObject(out: java.io.ObjectOutputStream) { + out.writeObject(key) + out.writeObject(value) + } + + private def readObject(in: java.io.ObjectInputStream) { + key = in.readObject().asInstanceOf[A] + value = in.readObject().asInstanceOf[B] + hash = computeHash(key) + } + + } + + private class HashMapCollision1[A,+B](private[HashMap] var hash: Int, var kvs: ListMap[A,B @uncheckedVariance]) extends HashMap[A,B] { + override def size = kvs.size + + override def get0(key: A, hash: Int, level: Int): Option[B] = + if (hash == this.hash) kvs.get(key) else None + + override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = + if (hash == this.hash) new HashMapCollision1(hash, kvs.updated(key, value)) + else { + var m: HashMap[A,B1] = new HashTrieMap[A,B1](0,new Array[HashMap[A,B1]](0),0) + // might be able to save some ops here, but it doesn't seem to be worth it + for ((k,v) <- kvs) + m = m.updated0(k, this.hash, level, v, null) + m.updated0(key, hash, level, value, kv) + } + + override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = + if (hash == this.hash) { + val kvs1 = kvs - key + if (!kvs1.isEmpty) + new HashMapCollision1(hash, kvs1) + else + HashMap.empty[A,B] + } else this + + override def iterator: Iterator[(A,B)] = kvs.iterator + override def foreach[U](f: ((A, B)) => U): Unit = kvs.foreach(f) + + private def writeObject(out: java.io.ObjectOutputStream) { + // this cannot work - reading things in might produce different + // hash codes and remove the collision. 
however this is never called + // because no references to this class are ever handed out to client code + // and HashTrieMap serialization takes care of the situation + error("cannot serialize an immutable.HashMap where all items have the same 32-bit hash code") + //out.writeObject(kvs) + } + + private def readObject(in: java.io.ObjectInputStream) { + error("cannot deserialize an immutable.HashMap where all items have the same 32-bit hash code") + //kvs = in.readObject().asInstanceOf[ListMap[A,B]] + //hash = computeHash(kvs.) + } + + } + + + class HashTrieMap[A,+B](private var bitmap: Int, private var elems: Array[HashMap[A,B @uncheckedVariance]], + private var size0: Int) extends HashMap[A,B] { +/* + def this (level: Int, m1: HashMap1[A,B], m2: HashMap1[A,B]) = { + this(((m1.hash >>> level) & 0x1f) | ((m2.hash >>> level) & 0x1f), { + val idx1 = (m1.hash >>> level) & 0x1f + val idx2 = (m2.hash >>> level) & 0x1f + assert(idx1 != idx2, m1.hash + "==" + m2.hash + " at level " + level) // TODO + val elems = new Array[HashMap[A,B]](2) + if (idx1 < idx2) { + elems(0) = m1 + elems(1) = m2 + } else { + elems(0) = m2 + elems(1) = m1 + } + elems + }, 2) + } +*/ + override def size = size0 + + override def get0(key: A, hash: Int, level: Int): Option[B] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + if (bitmap == - 1) { + elems(index & 0x1f).get0(key, hash, level + 5) + } else if ((bitmap & mask) != 0) { + val offset = Integer.bitCount(bitmap & (mask-1)) + // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) + elems(offset).get0(key, hash, level + 5) + } else + None + } + + override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1)): HashMap[A, B1] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if ((bitmap & mask) != 0) { + val elemsNew = new Array[HashMap[A,B1]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) 
+ val sub = elems(offset) + // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) + val subNew = sub.updated0(key, hash, level + 5, value, kv) + elemsNew(offset) = subNew + new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + } else { + val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) + Array.copy(elems, 0, elemsNew, 0, offset) + elemsNew(offset) = new HashMap1(key, hash, value, kv) + Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) + val bitmapNew = bitmap | mask + new HashTrieMap(bitmapNew, elemsNew, size + 1) + } + } + + override def removed0(key: A, hash: Int, level: Int): HashMap[A, B] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if (((bitmap >>> index) & 1) == 1) { + val elemsNew = new Array[HashMap[A,B]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + val sub = elems(offset) + // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) + val subNew = sub.removed0(key, hash, level + 5) + elemsNew(offset) = subNew + // TODO: handle shrinking + val sizeNew = size + (subNew.size - sub.size) + if (sizeNew > 0) + new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + else + HashMap.empty[A,B] + } else { + this + } + } + +/* + override def iterator = { // TODO: optimize (use a stack to keep track of pos) + + def iter(m: HashTrieMap[A,B], k: => Stream[(A,B)]): Stream[(A,B)] = { + def horiz(elems: Array[HashMap[A,B]], i: Int, k: => Stream[(A,B)]): Stream[(A,B)] = { + if (i < elems.length) { + elems(i) match { + case m: HashTrieMap[A,B] => iter(m, horiz(elems, i+1, k)) + case m: HashMap1[A,B] => new Stream.Cons(m.ensurePair, horiz(elems, i+1, k)) + } + } else k + } + horiz(m.elems, 0, k) + } + iter(this, Stream.empty).iterator + } +*/ + + + override def iterator = new Iterator[(A,B)] { + private[this] var depth = 0 + private[this] var arrayStack = new 
Array[Array[HashMap[A,B]]](6) + private[this] var posStack = new Array[Int](6) + + private[this] var arrayD = elems + private[this] var posD = 0 + + private[this] var subIter: Iterator[(A,B)] = null // to traverse collision nodes + + def hasNext = (subIter ne null) || depth >= 0 + + def next: (A,B) = { + if (subIter ne null) { + val el = subIter.next + if (!subIter.hasNext) + subIter = null + el + } else + next0(arrayD, posD) + } + + @scala.annotation.tailrec private[this] def next0(elems: Array[HashMap[A,B]], i: Int): (A,B) = { + if (i == elems.length-1) { // reached end of level, pop stack + depth -= 1 + if (depth >= 0) { + arrayD = arrayStack(depth) + posD = posStack(depth) + arrayStack(depth) = null + } else { + arrayD = null + posD = 0 + } + } else + posD += 1 + + elems(i) match { + case m: HashTrieMap[A,B] => // push current pos onto stack and descend + if (depth >= 0) { + arrayStack(depth) = arrayD + posStack(depth) = posD + } + depth += 1 + arrayD = m.elems + posD = 0 + next0(m.elems, 0) + case m: HashMap1[A,B] => m.ensurePair + case m => + subIter = m.iterator + subIter.next + } + } + } + +/* + +import collection.immutable._ +def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) } +var mOld = OldHashMap.empty[Int,Int] +var mNew = HashMap.empty[Int,Int] +time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) } +time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) } +time { for (i <- 0 until 100000) mOld = mOld.updated(i,i) } +time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) } +time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) } +time { for (i <- 0 until 100000) mNew = mNew.updated(i,i) } +time { mOld.iterator.foreach( p => ()) } +time { mOld.iterator.foreach( p => ()) } +time { mOld.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } + +*/ + + + override def 
foreach[U](f: ((A, B)) => U): Unit = { + var i = 0; + while (i < elems.length) { + elems(i).foreach(f) + i += 1 + } + } + + + private def writeObject(out: java.io.ObjectOutputStream) { + // no out.defaultWriteObject() + out.writeInt(size) + foreach { p => + out.writeObject(p._1) + out.writeObject(p._2) + } + } + + private def readObject(in: java.io.ObjectInputStream) { + val size = in.readInt + var index = 0 + var m = HashMap.empty[A,B] + while (index < size) { + // TODO: optimize (use unsafe mutable update) + m = m + ((in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B])) + index += 1 + } + var tm = m.asInstanceOf[HashTrieMap[A,B]] + bitmap = tm.bitmap + elems = tm.elems + size0 = tm.size0 + } + + } + } diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 2320187be9..16d4473de1 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -13,148 +13,353 @@ package scala.collection package immutable import generic._ +import annotation.unchecked.uncheckedVariance /** <p> - * This class implements immutable sets using a hash table. - * </p> - * <p> - * It is optimized for sequential accesses where the last updated table is - * accessed most often. It supports with reasonable efficiency accesses to - * previous versions of the table by keeping a change log that's regularly - * compacted. It needs to synchronize most methods, so it is less suitable - * for highly concurrent accesses. + * This class implements immutable sets using a hash trie. * </p> * * @note the builder of a hash set returns specialized representations EmptySet,Set1,..., Set4 * for sets of size <= 4. 
* * @author Martin Odersky + * @author Tiark Rompf * @version 2.8 * @since 2.3 */ -@serializable @SerialVersionUID(1L) +@serializable @SerialVersionUID(2L) class HashSet[A] extends Set[A] with GenericSetTemplate[A, HashSet] - with SetLike[A, HashSet[A]] - with mutable.FlatHashTable[A] { + with SetLike[A, HashSet[A]] { override def companion: GenericCompanion[HashSet] = HashSet - @transient protected var later: HashSet[A] = null - @transient protected var changedElem: A = _ - @transient protected var deleted: Boolean = _ - - def contains(elem: A): Boolean = synchronized { - var m = this - var cnt = 0 - while (m.later != null) { - if (elem == m.changedElem) return m.deleted - cnt += 1 - m = m.later - } - if (cnt > logLimit) makeCopy(m) - m.containsEntry(elem) - } + //class HashSet[A] extends Set[A] with SetLike[A, HashSet[A]] { - def + (elem: A): HashSet[A] = synchronized { - makeCopyIfUpdated() - if (containsEntry(elem)) this - else { - markUpdated(elem, false) - later addEntry elem - later - } - } + override def size: Int = 0 - def - (elem: A): HashSet[A] = synchronized { - makeCopyIfUpdated() - if (!containsEntry(elem)) this - else { - markUpdated(elem, true) - later removeEntry elem - later - } - } + override def empty = HashSet.empty[A] - override def size: Int = synchronized { - var m = this - var cnt = 0 - var s = 0 - while (m.later != null) { - if (m.deleted) s += 1 else s -= 1 - cnt += 1 - m = m.later - } - s += m.tableSize - if (cnt > logLimit) makeCopy(m) - s - } + def iterator: Iterator[A] = Iterator.empty - override def iterator = synchronized { - makeCopyIfUpdated() - // note need to cache because (later versions of) set might be mutated while elements are traversed. 
- val cached = new mutable.ArrayBuffer() ++= super.iterator - cached.iterator - } + override def foreach[U](f: A => U): Unit = { } - private def logLimit: Int = math.sqrt(table.length).toInt - - private def markUpdated(elem: A, del: Boolean) { - val lf = loadFactor - later = new HashSet[A] { - override def initialSize = 0 - /* We need to do this to avoid a reference to the outer HashMap */ - def _newLoadFactor = lf - override def loadFactor = _newLoadFactor - table = HashSet.this.table - tableSize = HashSet.this.tableSize - threshold = HashSet.this.threshold - } - changedElem = elem - deleted = del - } + def contains(e: A): Boolean = get0(e, computeHash(e), 0) - private def makeCopy(last: HashSet[A]) { - def undo(m: HashSet[A]) { - if (m.deleted) addEntry(m.changedElem) - else removeEntry(m.changedElem) - } - table = new scala.Array[AnyRef](last.table.length) - scala.Array.copy(last.table, 0, table, 0, table.length) - tableSize = last.tableSize - threshold = last.threshold - - // we need to work from the end of the list but non-tail-recursion - // potentially blows the stack, so instead we create a stack on the heap. - // See ticket #408. 
- val toUndo = new mutable.Stack[HashSet[A]] - toUndo pushAll ((Iterator iterate this)(_.later) takeWhile (_ ne last)) - toUndo foreach undo - later = null - } + override def + (e: A): HashSet[A] = updated0(e, computeHash(e), 0) - private def makeCopyIfUpdated() { - var m = this - while (m.later != null) m = m.later - if (m ne this) makeCopy(m) - } + override def + (elem1: A, elem2: A, elems: A*): HashSet[A] = + this + elem1 + elem2 ++ elems + // TODO: optimize (might be able to use mutable updates) - private def writeObject(s: java.io.ObjectOutputStream) { - serializeTo(s) - } + def - (e: A): HashSet[A] = + removed0(e, computeHash(e), 0) + + protected def elemHashCode(key: A) = if (key == null) 0 else key.hashCode() - private def readObject(in: java.io.ObjectInputStream) { - init(in, x => x) + protected final def improve(hcode: Int) = { + var h: Int = hcode + ~(hcode << 9) + h = h ^ (h >>> 14) + h = h + (h << 4) + h ^ (h >>> 10) } + + protected def computeHash(key: A) = improve(elemHashCode(key)) + + protected def get0(key: A, hash: Int, level: Int): Boolean = false + + protected def updated0(key: A, hash: Int, level: Int): HashSet[A] = + new HashSet.HashSet1(key, hash) + + + + protected def removed0(key: A, hash: Int, level: Int): HashSet[A] = this + } +/* +object HashSet extends SetFactory[HashSet] { + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A] + override def empty[A]: HashSet[A] = new HashSet +} +*/ + + /** A factory object for immutable HashSets. * * @author Martin Odersky + * @author Tiark Rompf * @version 2.8 * @since 2.3 */ object HashSet extends SetFactory[HashSet] { implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, HashSet[A]] = setCanBuildFrom[A] - override def empty[A]: HashSet[A] = new HashSet + override def empty[A]: HashSet[A] = EmptyHashSet.asInstanceOf[HashSet[A]] + + private object EmptyHashSet extends HashSet[Any] { + } + + // TODO: add HashSet2, HashSet3, ... 
+ + class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] { + override def size = 1 + + override def get0(key: A, hash: Int, level: Int): Boolean = + (hash == this.hash && key == this.key) + + override def updated0(key: A, hash: Int, level: Int): HashSet[A] = + if (hash == this.hash && key == this.key) this + else { + if (hash != this.hash) { + //new HashTrieSet[A](level+5, this, new HashSet1(key, hash)) + val m = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) // TODO: could save array alloc + m.updated0(this.key, this.hash, level).updated0(key, hash, level) + } else { + // 32-bit hash collision (rare, but not impossible) + // wrap this in a HashTrieSet if called with level == 0 (otherwise serialization won't work) + if (level == 0) { + val elems = new Array[HashSet[A]](1) + elems(0) = new HashSetCollision1(hash, ListSet.empty + this.key + key) + new HashTrieSet[A](1 << ((hash >>> level) & 0x1f), elems, 2) + } else { + new HashSetCollision1(hash, ListSet.empty + this.key + key) + } + } + } + + override def removed0(key: A, hash: Int, level: Int): HashSet[A] = + if (hash == this.hash && key == this.key) HashSet.empty[A] else this + + override def iterator: Iterator[A] = Iterator(key) + override def foreach[U](f: A => U): Unit = f(key) + + private def writeObject(out: java.io.ObjectOutputStream) { + out.writeObject(key) + } + + private def readObject(in: java.io.ObjectInputStream) { + key = in.readObject().asInstanceOf[A] + hash = computeHash(key) + } + + } + + private class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A]) extends HashSet[A] { + override def size = ks.size + + override def get0(key: A, hash: Int, level: Int): Boolean = + if (hash == this.hash) ks.contains(key) else false + + override def updated0(key: A, hash: Int, level: Int): HashSet[A] = + if (hash == this.hash) new HashSetCollision1(hash, ks + key) + else { + var m: HashSet[A] = new HashTrieSet[A](0,new Array[HashSet[A]](0),0) 
+ // might be able to save some ops here, but it doesn't seem to be worth it + for (k <- ks) + m = m.updated0(k, this.hash, level) + m.updated0(key, hash, level) + } + + override def removed0(key: A, hash: Int, level: Int): HashSet[A] = + if (hash == this.hash) { + val ks1 = ks - key + if (!ks1.isEmpty) + new HashSetCollision1(hash, ks1) + else + HashSet.empty[A] + } else this + + override def iterator: Iterator[A] = ks.iterator + override def foreach[U](f: A => U): Unit = ks.foreach(f) + + private def writeObject(out: java.io.ObjectOutputStream) { + // this cannot work - reading things in might produce different + // hash codes and remove the collision. however this is never called + // because no references to this class are ever handed out to client code + // and HashTrieSet serialization takes care of the situation + error("cannot serialize an immutable.HashSet where all items have the same 32-bit hash code") + //out.writeObject(kvs) + } + + private def readObject(in: java.io.ObjectInputStream) { + error("cannot deserialize an immutable.HashSet where all items have the same 32-bit hash code") + //kvs = in.readObject().asInstanceOf[ListSet[A]] + //hash = computeHash(kvs.) 
+ } + + } + + + class HashTrieSet[A](private var bitmap: Int, private var elems: Array[HashSet[A]], + private var size0: Int) extends HashSet[A] { + + override def size = size0 + + override def get0(key: A, hash: Int, level: Int): Boolean = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + if (bitmap == - 1) { + elems(index & 0x1f).get0(key, hash, level + 5) + } else if ((bitmap & mask) != 0) { + val offset = Integer.bitCount(bitmap & (mask-1)) + // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site) + elems(offset).get0(key, hash, level + 5) + } else + false + } + + override def updated0(key: A, hash: Int, level: Int): HashSet[A] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if ((bitmap & mask) != 0) { + val elemsNew = new Array[HashSet[A]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + val sub = elems(offset) + // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site) + val subNew = sub.updated0(key, hash, level + 5) + elemsNew(offset) = subNew + new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size)) + } else { + val elemsNew = new Array[HashSet[A]](elems.length + 1) + Array.copy(elems, 0, elemsNew, 0, offset) + elemsNew(offset) = new HashSet1(key, hash) + Array.copy(elems, offset, elemsNew, offset + 1, elems.length - offset) + val bitmapNew = bitmap | mask + new HashTrieSet(bitmapNew, elemsNew, size + 1) + } + } + + override def removed0(key: A, hash: Int, level: Int): HashSet[A] = { + val index = (hash >>> level) & 0x1f + val mask = (1 << index) + val offset = Integer.bitCount(bitmap & (mask-1)) + if (((bitmap >>> index) & 1) == 1) { + val elemsNew = new Array[HashSet[A]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + val sub = elems(offset) + // TODO: might be worth checking if sub is HashTrieSet (-> monomorphic call site) + val subNew = sub.removed0(key, hash, level + 
5) + elemsNew(offset) = subNew + // TODO: handle shrinking + val sizeNew = size + (subNew.size - sub.size) + if (sizeNew > 0) + new HashTrieSet(bitmap, elemsNew, size + (subNew.size - sub.size)) + else + HashSet.empty[A] + } else { + this + } + } + + override def iterator = new Iterator[A] { + private[this] var depth = 0 + private[this] var arrayStack = new Array[Array[HashSet[A]]](6) + private[this] var posStack = new Array[Int](6) + + private[this] var arrayD = elems + private[this] var posD = 0 + + private[this] var subIter: Iterator[A] = null // to traverse collision nodes + + def hasNext = (subIter ne null) || depth >= 0 + + def next: A = { + if (subIter ne null) { + val el = subIter.next + if (!subIter.hasNext) + subIter = null + el + } else + next0(arrayD, posD) + } + + @scala.annotation.tailrec private[this] def next0(elems: Array[HashSet[A]], i: Int): A = { + if (i == elems.length-1) { // reached end of level, pop stack + depth -= 1 + if (depth >= 0) { + arrayD = arrayStack(depth) + posD = posStack(depth) + arrayStack(depth) = null + } else { + arrayD = null + posD = 0 + } + } else + posD += 1 + + elems(i) match { + case m: HashTrieSet[A] => // push current pos onto stack and descend + if (depth >= 0) { + arrayStack(depth) = arrayD + posStack(depth) = posD + } + depth += 1 + arrayD = m.elems + posD = 0 + next0(m.elems, 0) + case m: HashSet1[A] => m.key + case m => + subIter = m.iterator + subIter.next + } + } + } + +/* + +import collection.immutable._ +def time(block: =>Unit) = { val t0 = System.nanoTime; block; println("elapsed: " + (System.nanoTime - t0)/1000000.0) } +var mOld = OldHashSet.empty[Int] +var mNew = HashSet.empty[Int] +time { for (i <- 0 until 100000) mOld = mOld + i } +time { for (i <- 0 until 100000) mOld = mOld + i } +time { for (i <- 0 until 100000) mOld = mOld + i } +time { for (i <- 0 until 100000) mNew = mNew + i } +time { for (i <- 0 until 100000) mNew = mNew + i } +time { for (i <- 0 until 100000) mNew = mNew + i } +time { 
mOld.iterator.foreach( p => ()) } +time { mOld.iterator.foreach( p => ()) } +time { mOld.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } +time { mNew.iterator.foreach( p => ()) } + +*/ + + + override def foreach[U](f: A => U): Unit = { + var i = 0; + while (i < elems.length) { + elems(i).foreach(f) + i += 1 + } + } + + + private def writeObject(out: java.io.ObjectOutputStream) { + // no out.defaultWriteObject() + out.writeInt(size) + foreach { e => + out.writeObject(e) + } + } + + private def readObject(in: java.io.ObjectInputStream) { + val size = in.readInt + var index = 0 + var m = HashSet.empty[A] + while (index < size) { + // TODO: optimize (use unsafe mutable update) + m = m + in.readObject.asInstanceOf[A] + index += 1 + } + var tm = m.asInstanceOf[HashTrieSet[A]] + bitmap = tm.bitmap + elems = tm.elems + size0 = tm.size0 + } + + } + } diff --git a/src/library/scala/collection/immutable/IndexedSeq.scala b/src/library/scala/collection/immutable/IndexedSeq.scala index 0d7b1b0d23..3f29052808 100644 --- a/src/library/scala/collection/immutable/IndexedSeq.scala +++ b/src/library/scala/collection/immutable/IndexedSeq.scala @@ -14,8 +14,9 @@ package immutable import generic._ import mutable.{ArrayBuffer, Builder} -/** A subtrait of <code>collection.IndexedSeq</code> which represents sequences +/** A subtrait of <code>collection.IndexedSeq</code> which represents indexed sequences * that cannot be mutated. + * $indexedSeqInfo * * @since 2.8 */ @@ -36,5 +37,5 @@ object IndexedSeq extends SeqFactory[IndexedSeq] { def apply(idx: Int) = buf.apply(idx) } implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, IndexedSeq[A]] = new GenericCanBuildFrom[A] - def newBuilder[A]: Builder[A, IndexedSeq[A]] = new ArrayBuffer[A] mapResult (buf => new Impl(buf)) -}
\ No newline at end of file + def newBuilder[A]: Builder[A, IndexedSeq[A]] = Vector.newBuilder[A] +} diff --git a/src/library/scala/collection/immutable/IntMap.scala b/src/library/scala/collection/immutable/IntMap.scala index 52451e6012..62309a9f48 100644 --- a/src/library/scala/collection/immutable/IntMap.scala +++ b/src/library/scala/collection/immutable/IntMap.scala @@ -151,7 +151,10 @@ import IntMap._ * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Integer Maps</a> * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers. * + * Note: This class is as of 2.8 largely superseded by HashMap. + * * @since 2.7 + * */ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap[T]] { override def empty: IntMap[T] = IntMap.Nil; @@ -357,7 +360,7 @@ sealed abstract class IntMap[+T] extends Map[Int, T] with MapLike[Int, T, IntMap } /** - * Forms the intersection of these two maps with a combinining function. The resulting map is + * Forms the intersection of these two maps with a combining function. The resulting map is * a map that has only keys present in both maps and has values produced from the original mappings * by combining them with f. * diff --git a/src/library/scala/collection/immutable/LinearSeq.scala b/src/library/scala/collection/immutable/LinearSeq.scala index c1efea037c..016afd4508 100644 --- a/src/library/scala/collection/immutable/LinearSeq.scala +++ b/src/library/scala/collection/immutable/LinearSeq.scala @@ -17,7 +17,7 @@ import mutable.Builder /** A subtrait of <code>collection.LinearSeq</code> which represents sequences * that cannot be mutated. 
- * + * $linearSeqInfo * @since 2.8 */ trait LinearSeq[+A] extends Seq[A] diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index 2088f3ac78..2b91ab8852 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -46,7 +46,7 @@ import annotation.tailrec sealed abstract class List[+A] extends LinearSeq[A] with Product with GenericTraversableTemplate[A, List] - with LinearSeqLike[A, List[A]] { + with LinearSeqOptimized[A, List[A]] { override def companion: GenericCompanion[List] = List import scala.collection.{Iterable, Traversable, Seq, IndexedSeq} @@ -61,7 +61,7 @@ sealed abstract class List[+A] extends LinearSeq[A] * @param x the element to prepend. * @return a list which contains `x` as first element and * which continues with this list. - * @ex `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)` + * @example `1 :: List(2, 3) = List(2, 3).::(1) = List(1, 2, 3)` * @usecase def ::(x: A): List[A] */ def ::[B >: A] (x: B): List[B] = @@ -71,7 +71,7 @@ sealed abstract class List[+A] extends LinearSeq[A] * @param prefix The list elements to prepend. * @return a list resulting from the concatenation of the given * list `prefix` and this list. 
- * @ex `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)` + * @example `List(1, 2) ::: List(3, 4) = List(3, 4).:::(List(1, 2)) = List(1, 2, 3, 4)` * @usecase def :::(prefix: List[A]): List[A] */ def :::[B >: A](prefix: List[B]): List[B] = @@ -133,16 +133,18 @@ sealed abstract class List[+A] extends LinearSeq[A] loop(this) } - // Overridden methods from IterableLike or overloaded variants of such methods + // Overridden methods from IterableLike and SeqLike or overloaded variants of such methods - override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { + override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = { val b = bf(this) if (b.isInstanceOf[ListBuffer[_]]) (this ::: that.toList).asInstanceOf[That] else super.++(that) } - override def ++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[List[A], B, That]): That = - this ++ that.toList + override def +:[B >: A, That](elem: B)(implicit bf: CanBuildFrom[List[A], B, That]): That = bf match { + case _: List.GenericCanBuildFrom[_] => (elem :: this).asInstanceOf[That] + case _ => super.+:(elem)(bf) + } override def toList: List[A] = this @@ -288,6 +290,9 @@ sealed abstract class List[+A] extends LinearSeq[A] b.toList } + @deprecated("use `distinct' instead") + def removeDuplicates: List[A] = distinct + /** <p> * Sort the list according to the comparison function * `lt(e1: a, e2: a) => Boolean`, @@ -299,7 +304,7 @@ sealed abstract class List[+A] extends LinearSeq[A] * @param lt the comparison function * @return a list sorted according to the comparison function * `lt(e1: a, e2: a) => Boolean`. 
- * @ex <pre> + * @example <pre> * List("Steve", "Tom", "John", "Bob") * .sort((e1, e2) => (e1 compareTo e2) < 0) = * List("Bob", "John", "Steve", "Tom")</pre> @@ -383,7 +388,7 @@ case object Nil extends List[Nothing] { throw new NoSuchElementException("head of empty list") override def tail: List[Nothing] = throw new UnsupportedOperationException("tail of empty list") - // Removal of equals method here might lead to an infinite recusion similar to IntMap.equals. + // Removal of equals method here might lead to an infinite recursion similar to IntMap.equals. override def equals(that: Any) = that match { case that1: Seq[_] => that1.isEmpty case _ => false @@ -539,7 +544,7 @@ object List extends SeqFactory[List] { * Returns the `Left` values in the given `Iterable` * of `Either`s. */ - @deprecated("use `xs partialMap { case Left(x: A) => x }' instead of `List.lefts(xs)'") + @deprecated("use `xs collect { case Left(x: A) => x }' instead of `List.lefts(xs)'") def lefts[A, B](es: Iterable[Either[A, B]]) = es.foldRight[List[A]](Nil)((e, as) => e match { case Left(a) => a :: as @@ -549,7 +554,7 @@ object List extends SeqFactory[List] { /** * Returns the `Right` values in the given`Iterable` of `Either`s. */ - @deprecated("use `xs partialMap { case Right(x: B) => x }' instead of `List.rights(xs)'") + @deprecated("use `xs collect { case Right(x: B) => x }' instead of `List.rights(xs)'") def rights[A, B](es: Iterable[Either[A, B]]) = es.foldRight[List[B]](Nil)((e, bs) => e match { case Left(_) => bs @@ -561,9 +566,9 @@ object List extends SeqFactory[List] { * @param xs the iterable of Eithers to separate * @return a pair of lists. 
*/ - @deprecated("use `Either.separate' instead") + @deprecated("use `(for (Left(x) <- es) yield x, for (Right(x) <- es) yield x)` instead") def separate[A,B](es: Iterable[Either[A, B]]): (List[A], List[B]) = - es.foldRight[(List[A], List[B])]((Nil, Nil)) { + es.foldRight[(List[A], List[B])]((Nil, Nil)) { case (Left(a), (lefts, rights)) => (a :: lefts, rights) case (Right(b), (lefts, rights)) => (lefts, b :: rights) } @@ -590,7 +595,7 @@ object List extends SeqFactory[List] { * * @param arr the array to convert * @param start the first index to consider - * @param len the lenght of the range to convert + * @param len the length of the range to convert * @return a list that contains the same elements than `arr` * in the same order */ diff --git a/src/library/scala/collection/immutable/ListMap.scala b/src/library/scala/collection/immutable/ListMap.scala index 0f66a1f452..d8e3e0856b 100644 --- a/src/library/scala/collection/immutable/ListMap.scala +++ b/src/library/scala/collection/immutable/ListMap.scala @@ -30,7 +30,7 @@ object ListMap extends ImmutableMapFactory[ListMap] { * directly, or by applying the function <code>ListMap.empty</code>. * * @author Matthias Zenger - * @author Martin Oderskty + * @author Martin Odersky * @version 2.0, 01/01/2007 * @since 1 */ diff --git a/src/library/scala/collection/immutable/LongMap.scala b/src/library/scala/collection/immutable/LongMap.scala index e527712475..0d74e41cec 100644 --- a/src/library/scala/collection/immutable/LongMap.scala +++ b/src/library/scala/collection/immutable/LongMap.scala @@ -138,6 +138,8 @@ import LongMap._; * <a href="http://citeseer.ist.psu.edu/okasaki98fast.html">Fast Mergeable Long Maps</a> * by Okasaki and Gill. Essentially a trie based on binary digits of the the integers. * + * Note: This class is as of 2.8 largely superseded by HashMap. 
+ * * @since 2.7 */ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, LongMap[T]] { @@ -344,7 +346,7 @@ sealed abstract class LongMap[+T] extends Map[Long, T] with MapLike[Long, T, Lon } /** - * Forms the intersection of these two maps with a combinining function. The resulting map is + * Forms the intersection of these two maps with a combining function. The resulting map is * a map that has only keys present in both maps and has values produced from the original mappings * by combining them with f. * diff --git a/src/library/scala/collection/immutable/Map.scala b/src/library/scala/collection/immutable/Map.scala index f42794d09e..b5a852683a 100644 --- a/src/library/scala/collection/immutable/Map.scala +++ b/src/library/scala/collection/immutable/Map.scala @@ -44,7 +44,7 @@ trait Map[A, +B] extends Iterable[(A, B)] object Map extends ImmutableMapFactory[Map] { implicit def canBuildFrom[A, B]: CanBuildFrom[Coll, (A, B), Map[A, B]] = new MapCanBuildFrom[A, B] - def empty[A, B]: Map[A, B] = new EmptyMap[A, B] + def empty[A, B]: Map[A, B] = EmptyMap.asInstanceOf[Map[A, B]] class WithDefault[A, +B](underlying: Map[A, B], d: A => B) extends Map[A, B] { override def size = underlying.size @@ -58,12 +58,22 @@ object Map extends ImmutableMapFactory[Map] { } @serializable - class EmptyMap[A, +B] extends Map[A, B] { + private object EmptyMap extends Map[Any, Nothing] { + override def size: Int = 0 + def get(key: Any): Option[Nothing] = None + def iterator: Iterator[(Any, Nothing)] = Iterator.empty + override def updated [B1] (key: Any, value: B1): Map[Any, B1] = new Map1(key, value) + def + [B1](kv: (Any, B1)): Map[Any, B1] = updated(kv._1, kv._2) + def - (key: Any): Map[Any, Nothing] = this + } + + @serializable @deprecated("use `Map.empty' instead") + class EmptyMap[A,B] extends Map[A,B] { override def size: Int = 0 def get(key: A): Option[B] = None def iterator: Iterator[(A, B)] = Iterator.empty - override def updated [B1 >: B] (key: A, value: 
B1): Map[A, B1] = new Map1(key, value) - def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) + override def updated [B1] (key: A, value: B1): Map[A, B1] = new Map1(key, value) + def + [B1](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) def - (key: A): Map[A, B] = this } @@ -78,7 +88,7 @@ object Map extends ImmutableMapFactory[Map] { else new Map2(key1, value1, key, value) def + [B1 >: B](kv: (A, B1)): Map[A, B1] = updated(kv._1, kv._2) def - (key: A): Map[A, B] = - if (key == key1) empty else this + if (key == key1) Map.empty else this override def foreach[U](f: ((A, B)) => U): Unit = { f((key1, value1)) } diff --git a/src/library/scala/collection/immutable/MapLike.scala b/src/library/scala/collection/immutable/MapLike.scala index a06bce1038..662321bb0c 100644 --- a/src/library/scala/collection/immutable/MapLike.scala +++ b/src/library/scala/collection/immutable/MapLike.scala @@ -41,8 +41,9 @@ import generic._ * @version 2.8 * @since 2.8 */ -trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] extends scala.collection.MapLike[A, B, This] { -self => +trait MapLike[A, +B, +This <: MapLike[A, B, This] with Map[A, B]] + extends scala.collection.MapLike[A, B, This] +{ self => import scala.collection.Traversable @@ -74,16 +75,36 @@ self => * * @param elems the traversable object. */ - override def ++[B1 >: B](elems: Traversable[(A, B1)]): immutable.Map[A, B1] = - ((repr: immutable.Map[A, B1]) /: elems) (_ + _) + override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): immutable.Map[A, B1] = + ((repr: immutable.Map[A, B1]) /: xs) (_ + _) - /** Adds a number of elements provided by an iterator - * and returns a new collection with the added elements. - * - * @param iter the iterator + /** Filters this map by retaining only keys satisfying a predicate. + * @param p the predicate used to test keys + * @return an immutable map consisting only of those key value pairs of this map where the key satisfies + * the predicate `p`. 
The resulting map wraps the original map without copying any elements.
+   */
-  override def ++[B1 >: B] (iter: Iterator[(A, B1)]): immutable.Map[A, B1] =
-    ((repr: immutable.Map[A, B1]) /: iter) (_ + _)
+  override def filterKeys(p: A => Boolean): Map[A, B] = new DefaultMap[A, B] {
+    override def foreach[C](f: ((A, B)) => C): Unit = for (kv <- self) if (p(kv._1)) f(kv)
+    def iterator = self.iterator.filter(kv => p(kv._1))
+    override def contains(key: A) = self.contains(key) && p(key)
+    def get(key: A) = if (!p(key)) None else self.get(key)
+  }
+
+  /** Transforms this map by applying a function to every retrieved value.
+   *  @param f the function used to transform values of this map.
+   *  @return an immutable map which maps every key of this map
+   *          to `f(this(key))`. The resulting map wraps the original map without copying any elements.
+   */
+  /** A map view resulting from applying a given function `f` to each value
+   *  associated with a key in this map.
+   */
+  override def mapValues[C](f: B => C): Map[A, C] = new DefaultMap[A, C] {
+    override def foreach[D](g: ((A, C)) => D): Unit = for ((k, v) <- self) g((k, f(v)))
+    def iterator = for ((k, v) <- self.iterator) yield (k, f(v))
+    override def size = self.size
+    override def contains(key: A) = self.contains(key)
+    def get(key: A) = self.get(key).map(f)
+  }
 
   /** This function transforms all the values of mappings contained
    *  in this map with function <code>f</code>.
@@ -97,23 +118,6 @@ self =>
     b.result
   }
 
-  /** Returns a new map with all key/value pairs for which the predicate
-   *  <code>p</code> returns <code>true</code>.
-   *
-   *  @param p A predicate over key-value pairs
-   *  @note This method works by successively removing elements fro which the
-   *        predicate is false from this set.
-   *        If removal is slow, or you expect that most elements of the set$
-   *        will be removed, you might consider using <code>filter</code>
-   *        with a negated predicate instead.
- */ - override def filterNot(p: ((A, B)) => Boolean): This = { - var res: This = repr - for (kv <- this) - if (p(kv)) res = (res - kv._1).asInstanceOf[This] // !!! concrete overrides abstract problem - res - } - @deprecated("use `updated' instead") def update[B1 >: B](key: A, value: B1): immutable.Map[A, B1] = updated(key, value).asInstanceOf[immutable.Map[A, B1]] } diff --git a/src/library/scala/collection/immutable/MapProxy.scala b/src/library/scala/collection/immutable/MapProxy.scala index 0ef7aa620a..371af042e7 100644 --- a/src/library/scala/collection/immutable/MapProxy.scala +++ b/src/library/scala/collection/immutable/MapProxy.scala @@ -37,5 +37,9 @@ trait MapProxy[A, +B] extends Map[A, B] with MapProxyLike[A, B, Map[A, B]] override def + [B1 >: B](kv: (A, B1)): Map[A, B1] = newProxy(self + kv) override def + [B1 >: B](elem1: (A, B1), elem2: (A, B1), elems: (A, B1) *) = newProxy(self.+(elem1, elem2, elems: _*)) + override def -(key: A) = newProxy(self - key) + + override def filterKeys(p: A => Boolean) = self.filterKeys(p) + override def mapValues[C](f: B => C) = self.mapValues(f) } diff --git a/src/library/scala/collection/immutable/NumericRange.scala b/src/library/scala/collection/immutable/NumericRange.scala index 5514f7a24d..d3e4558884 100644 --- a/src/library/scala/collection/immutable/NumericRange.scala +++ b/src/library/scala/collection/immutable/NumericRange.scala @@ -34,11 +34,18 @@ import generic._ * @version 2.8 */ @serializable -abstract class NumericRange[+T] +abstract class NumericRange[T] (val start: T, val end: T, val step: T, val isInclusive: Boolean) (implicit num: Integral[T]) extends IndexedSeq[T] { + /** Note that NumericRange must be invariant so that constructs + * such as + * + * 1L to 10 by 5 + * + * do not infer the range type as AnyVal. 
+ */ import num._ private def fail(msg: String) = throw new IllegalArgumentException(msg) @@ -56,20 +63,18 @@ extends IndexedSeq[T] // inclusive/exclusiveness captured this way because we do not have any // concept of a "unit", we can't just add an epsilon to an exclusive // endpoint to make it inclusive (as can be done with the int-based Range.) - protected def limitTest[U >: T](x: U)(implicit unum: Integral[U]) = - !isEmpty && isInclusive && unum.equiv(x, end) + protected def limitTest(x: T) = !isEmpty && isInclusive && equiv(x, end) protected def underlying = collection.immutable.IndexedSeq.empty[T] /** Create a new range with the start and end values of this range and * a new <code>step</code>. */ - def by[U >: T](newStep: U)(implicit unum: Integral[U]): NumericRange[U] = - copy(start, end, newStep) + def by(newStep: T): NumericRange[T] = copy(start, end, newStep) /** Create a copy of this range. */ - def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): NumericRange[U] + def copy(start: T, end: T, step: T): NumericRange[T] override def foreach[U](f: T => U) { var i = start @@ -115,9 +120,8 @@ extends IndexedSeq[T] } // a well-typed contains method. - def containsTyped[U >: T](x: U)(implicit unum: Integral[U]): Boolean = { - import unum._ - def divides(d: U, by: U) = equiv(d % by, zero) + def containsTyped(x: T): Boolean = { + def divides(d: T, by: T) = equiv(d % by, zero) limitTest(x) || ( if (step > zero) @@ -154,7 +158,7 @@ extends IndexedSeq[T] // XXX This may be incomplete. 
new NumericRange[A](fm(start), fm(end), fm(step), isInclusive) { - def copy[A1 >: A](start: A1, end: A1, step: A1)(implicit unum: Integral[A1]): NumericRange[A1] = + def copy(start: A, end: A, step: A): NumericRange[A] = if (isInclusive) NumericRange.inclusive(start, end, step) else NumericRange(start, end, step) @@ -162,8 +166,7 @@ extends IndexedSeq[T] override def foreach[U](f: A => U) { underlyingRange foreach (x => f(fm(x))) } override def isEmpty = underlyingRange.isEmpty override def apply(idx: Int): A = fm(underlyingRange(idx)) - override def containsTyped[A1 >: A](el: A1)(implicit unum: Integral[A1]) = - underlyingRange exists (x => fm(x) == el) + override def containsTyped(el: A) = underlyingRange exists (x => fm(x) == el) } } @@ -200,7 +203,7 @@ extends IndexedSeq[T] object NumericRange { class Inclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) extends NumericRange(start, end, step, true) { - def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Inclusive[U] = + def copy(start: T, end: T, step: T): Inclusive[T] = NumericRange.inclusive(start, end, step) def exclusive: Exclusive[T] = NumericRange(start, end, step) @@ -208,7 +211,7 @@ object NumericRange { class Exclusive[T](start: T, end: T, step: T)(implicit num: Integral[T]) extends NumericRange(start, end, step, false) { - def copy[U >: T](start: U, end: U, step: U)(implicit unum: Integral[U]): Exclusive[U] = + def copy(start: T, end: T, step: T): Exclusive[T] = NumericRange(start, end, step) def inclusive: Inclusive[T] = NumericRange.inclusive(start, end, step) diff --git a/src/library/scala/collection/immutable/PagedSeq.scala b/src/library/scala/collection/immutable/PagedSeq.scala index bde8d67ffe..bd12502520 100644 --- a/src/library/scala/collection/immutable/PagedSeq.scala +++ b/src/library/scala/collection/immutable/PagedSeq.scala @@ -202,7 +202,7 @@ private class Page[T: ClassManifest](val num: Int) { /** The next page in the sequence */ var next : Page[T] = 
null
 
-  /** A later page in the sequence, serves a cachae for pointing to last page */
+  /** A later page in the sequence, serves as a cache for pointing to last page */
   var later : Page[T] = this
 
   /** The number of characters read into this page */
@@ -218,11 +218,11 @@ private class Page[T: ClassManifest](val num: Int) {
   /** The index of the first character in this page relative to the whole sequence */
   final def start = num * PageSize
 
-  /** The index of the character following the last charcater in this page relative
+  /** The index of the character following the last character in this page relative
    *  to the whole sequence */
   final def end = start + filled
 
-  /** The currently last page in the sequence; might change as more charcaters are appended */
+  /** The currently last page in the sequence; might change as more characters are appended */
   final def latest: Page[T] = {
     if (later.next != null) later = later.next.latest
     later
diff --git a/src/library/scala/collection/immutable/Queue.scala b/src/library/scala/collection/immutable/Queue.scala
index 9957f90ab3..02d344ceea 100644
--- a/src/library/scala/collection/immutable/Queue.scala
+++ b/src/library/scala/collection/immutable/Queue.scala
@@ -12,12 +12,8 @@
 package scala.collection
 package immutable
 
-import scala.annotation.tailrec
-
-object Queue {
-  val Empty: Queue[Nothing] = new Queue(Nil, Nil)
-  def apply[A](elems: A*) = new Queue(Nil, elems.toList)
-}
+import generic._
+import mutable.{ Builder, ListBuffer }
 
 /** <code>Queue</code> objects implement data structures that allow to
  *  insert and retrieve elements in a first-in-first-out (FIFO) manner.
@@ -28,10 +24,13 @@ object Queue { */ @serializable @SerialVersionUID(-7622936493364270175L) -class Queue[+A] protected( - protected val in: List[A], - protected val out: List[A]) extends Seq[A] -{ +class Queue[+A] protected(protected val in: List[A], protected val out: List[A]) + extends Seq[A] + with GenericTraversableTemplate[A, Queue] + with SeqLike[A, Queue[A]] { + + override def companion: GenericCompanion[Queue] = Queue + /** Returns the <code>n</code>-th element of this queue. * The first element is at position 0. * @@ -127,3 +126,13 @@ class Queue[+A] protected( */ override def toString() = mkString("Queue(", ", ", ")") } + +object Queue extends SeqFactory[Queue] { + implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Queue[A]] = new GenericCanBuildFrom[A] + def newBuilder[A]: Builder[A, Queue[A]] = new ListBuffer[A] mapResult (x => new Queue[A](Nil, x.toList)) + override def empty[A]: Queue[A] = new Queue[A](Nil, Nil) + override def apply[A](xs: A*): Queue[A] = new Queue[A](Nil, xs.toList) + + @deprecated("Use Queue.empty instead") + val Empty: Queue[Nothing] = Queue() +} diff --git a/src/library/scala/collection/immutable/Range.scala b/src/library/scala/collection/immutable/Range.scala index 47a97664de..43b11b67be 100644 --- a/src/library/scala/collection/immutable/Range.scala +++ b/src/library/scala/collection/immutable/Range.scala @@ -39,25 +39,34 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] def isInclusive = false - protected def limit = end - override def foreach[U](f: Int => U) { - var i = start - while (if (step > 0) i < limit else i > limit) { + if (fullLength > 0) { + val last = this.last + var i = start + while (i != last) { + f(i) + i += step + } f(i) - i += step } } - lazy val length: Int = { - def plen(start: Int, limit: Int, step: Int) = - if (limit <= start) 0 else (limit - start - 1) / step + 1 - if (step > 0) plen(start, limit, step) - else plen(limit, start, -step) + override def last: Int = if 
(step == 1 || step == -1) { + end - step + } else { + val size = end.toLong - start.toLong + val inclusiveLast = (size / step.toLong * step.toLong + start.toLong).toInt + if (size % step == 0) inclusiveLast - step else inclusiveLast } - final override def isEmpty = - if (step > 0) start >= limit else start <= limit + def length: Int = fullLength.toInt + + protected def fullLength: Long = if (end > start == step > 0 && start != end) + ((last.toLong - start.toLong) / step.toLong + 1) + else + 0 + + final override def isEmpty = length == 0 @inline final def apply(idx: Int): Int = { @@ -66,12 +75,19 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] } // take and drop have to be tolerant of large values without overflowing - private def locationAfterN(n: Int) = start + step * (0 max n min length) + private def locationAfterN(n: Int) = if (n > 0) { + if (step > 0) + ((start.toLong + step.toLong * n.toLong) min last.toLong).toInt + else + ((start.toLong + step.toLong * n.toLong) max last.toLong).toInt + } else { + start + } - final override def take(n: Int): Range = { - val limit1 = locationAfterN(n) - if (step > 0) Range(start, limit1 min limit, step) - else Range(start, limit1 max limit, step) + final override def take(n: Int): Range = if (n > 0 && length > 0) { + Range(start, locationAfterN(n - 1), step).inclusive + } else { + Range(start, start, step) } final override def drop(n: Int): Range = @@ -85,7 +101,11 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] private def skip(p: Int => Boolean): Int = { var s = start - while ((if (step > 0) s < limit else s > limit) && p(s)) s += step + if (length > 0) { + val last = this.last + while ((if (step > 0) s <= last else s >= last) && p(s)) + s += step + } s } @@ -103,16 +123,18 @@ class Range(val start: Int, val end: Int, val step: Int) extends IndexedSeq[Int] final override def dropRight(n: Int): Range = take(length - n) - final override def reverse: 
Range = new Range.Inclusive(last, start, -step) + final override def reverse: Range = if (length > 0) new Range.Inclusive(last, start, -step) else this /** Make range inclusive. - * @pre if (step > 0) end != MaxInt else end != MinInt */ def inclusive = new Range.Inclusive(start, end, step) - def contains(x: Int): Boolean = - if (step > 0) start <= x && x < limit && (x - start) % step == 0 - else start >= x && x > limit && (start - x) % step == 0 + final def contains(x: Int): Boolean = if (length > 0) { + if (step > 0) start <= x && x <= last && (x - start) % step == 0 + else start >= x && x >= last && (start - x) % step == 0 + } else { + false + } override def equals(other: Any) = other match { case x: Range => @@ -139,37 +161,45 @@ object Range { class Inclusive(start: Int, end: Int, step: Int) extends Range(start, end, step) { override def isInclusive = true - override protected val limit = end + math.signum(step) override protected def copy(start: Int, end: Int, step: Int): Range = new Inclusive(start, end, step) + override def last: Int = if (step == 1 || step == -1) + end + else + ((end.toLong - start.toLong) / step.toLong * step.toLong + start.toLong).toInt + protected override def fullLength: Long = if (end > start == step > 0 || start == end) + ((last.toLong - start.toLong) / step.toLong + 1) + else + 0 } - /** Make a range from `start` until `end` (exclusive) with step value 1. + /** Make a range from `start` until `end` (exclusive) with given step value. + * @note step != 0 */ def apply(start: Int, end: Int, step: Int): Range = new Range(start, end, step) /** Make an range from `start` to `end` inclusive with step value 1. - * @pre end != MaxInt */ def apply(start: Int, end: Int): Range with ByOne = new Range(start, end, 1) with ByOne /** Make an inclusive range from start to end with given step value. 
- *  @pre step != 0
- *  @pre if (step > 0) end != MaxInt else end != MinInt
+ *  @note step != 0
   */
  def inclusive(start: Int, end: Int, step: Int): Range.Inclusive = new Inclusive(start, end, step)

  /** Make an inclusive range from start to end with step value 1.
- *  @pre end != MaxInt
   */
  def inclusive(start: Int, end: Int): Range.Inclusive with ByOne = new Inclusive(start, end, 1) with ByOne

  trait ByOne extends Range {
    override final def foreach[U](f: Int => U) {
-      var i = start
-      val l = limit
-      while (i < l) {
+      if (length > 0) {
+        val last = this.last
+        var i = start
+        while (i != last) {
+          f(i)
+          i += 1
+        }
         f(i)
-        i += 1
       }
    }
  }
diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala
index dfb34552cd..e7b4f3c978 100644
--- a/src/library/scala/collection/immutable/RedBlack.scala
+++ b/src/library/scala/collection/immutable/RedBlack.scala
@@ -12,7 +12,8 @@ package scala.collection
 package immutable
 
-/**
+/** A base class containing the implementations for TreeMaps and TreeSets
+ *
  *  @since 2.3
  */
@serializable @SerialVersionUID(8691885935445612921L)
diff --git a/src/library/scala/collection/immutable/Set.scala b/src/library/scala/collection/immutable/Set.scala
index be1e86bcdd..1bec1b9a48 100644
--- a/src/library/scala/collection/immutable/Set.scala
+++ b/src/library/scala/collection/immutable/Set.scala
@@ -41,12 +41,22 @@ trait Set[A] extends Iterable[A]
 object Set extends SetFactory[Set] {
   implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Set[A]] = setCanBuildFrom[A]
-  override def empty[A]: Set[A] = new EmptySet[A]
+  override def empty[A]: Set[A] = EmptySet.asInstanceOf[Set[A]]
 
   private val hashSeed = "Set".hashCode
 
   /** An optimized representation for immutable empty sets */
   @serializable
+  private object EmptySet extends Set[Any] {
+    override def size: Int = 0
+    def contains(elem: Any): Boolean = false
+    def + (elem: Any): Set[Any] = new Set1(elem)
+    def - (elem: Any): Set[Any] = this
+    def 
iterator: Iterator[Any] = Iterator.empty + override def foreach[U](f: Any => U): Unit = {} + } + + @serializable @deprecated("use `Set.empty' instead") class EmptySet[A] extends Set[A] { override def size: Int = 0 def contains(elem: A): Boolean = false @@ -66,7 +76,7 @@ object Set extends SetFactory[Set] { if (contains(elem)) this else new Set2(elem1, elem) def - (elem: A): Set[A] = - if (elem == elem1) new EmptySet[A] + if (elem == elem1) Set.empty else this def iterator: Iterator[A] = Iterator(elem1) diff --git a/src/library/scala/collection/immutable/SortedMap.scala b/src/library/scala/collection/immutable/SortedMap.scala index 316cab9b50..919b529a49 100644 --- a/src/library/scala/collection/immutable/SortedMap.scala +++ b/src/library/scala/collection/immutable/SortedMap.scala @@ -31,6 +31,8 @@ trait SortedMap[A, +B] extends Map[A, B] override protected[this] def newBuilder : Builder[(A, B), SortedMap[A, B]] = SortedMap.newBuilder[A, B] + override def empty: SortedMap[A, B] = SortedMap.empty + override def updated [B1 >: B](key: A, value: B1): SortedMap[A, B1] = this + ((key, value)) /** Add a key/value pair to this map. @@ -56,16 +58,8 @@ trait SortedMap[A, +B] extends Map[A, B] * * @param elems the traversable object. */ - override def ++[B1 >: B](elems: scala.collection.Traversable[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: elems) (_ + _) - - /** Adds a number of elements provided by an iterator - * and returns a new collection with the added elements. 
- * - * @param iter the iterator - */ - override def ++[B1 >: B] (iter: Iterator[(A, B1)]): SortedMap[A, B1] = - ((repr: SortedMap[A, B1]) /: iter) (_ + _) + override def ++[B1 >: B](xs: TraversableOnce[(A, B1)]): SortedMap[A, B1] = + ((repr: SortedMap[A, B1]) /: xs) (_ + _) } /** diff --git a/src/library/scala/collection/immutable/Stack.scala b/src/library/scala/collection/immutable/Stack.scala index a5d7e9515a..640fb39af5 100644 --- a/src/library/scala/collection/immutable/Stack.scala +++ b/src/library/scala/collection/immutable/Stack.scala @@ -37,7 +37,7 @@ object Stack extends SeqFactory[Stack] { @serializable @SerialVersionUID(1976480595012942526L) class Stack[+A] protected (protected val elems: List[A]) extends LinearSeq[A] with GenericTraversableTemplate[A, Stack] - with LinearSeqLike[A, Stack[A]] { + with LinearSeqOptimized[A, Stack[A]] { override def companion: GenericCompanion[Stack] = Stack def this() = this(Nil) @@ -74,18 +74,8 @@ class Stack[+A] protected (protected val elems: List[A]) extends LinearSeq[A] * @param elems the iterator object. * @return the stack with the new elements on top. */ - def pushAll[B >: A](elems: Iterator[B]): Stack[B] = - ((this: Stack[B]) /: elems)(_ push _) - - /** Push all elements provided by the given traversable object onto - * the stack. The last element returned by the iterable object - * will be on top of the new stack. - * - * @param elems the iterable object. - * @return the stack with the new elements on top. - */ - def pushAll[B >: A](elems: scala.collection.Traversable[B]): Stack[B] = - ((this: Stack[B]) /: elems)(_ push _) + def pushAll[B >: A](xs: TraversableOnce[B]): Stack[B] = + ((this: Stack[B]) /: xs.toIterator)(_ push _) /** Returns the top element of the stack. An error is signaled if * there is no element on the stack. 
diff --git a/src/library/scala/collection/immutable/Stream.scala b/src/library/scala/collection/immutable/Stream.scala index 1377fbe59d..3b10963ddb 100644 --- a/src/library/scala/collection/immutable/Stream.scala +++ b/src/library/scala/collection/immutable/Stream.scala @@ -40,7 +40,7 @@ import scala.annotation.tailrec */ abstract class Stream[+A] extends LinearSeq[A] with GenericTraversableTemplate[A, Stream] - with LinearSeqLike[A, Stream[A]] { + with LinearSeqOptimized[A, Stream[A]] { self => override def companion: GenericCompanion[Stream] = Stream @@ -113,17 +113,21 @@ self => * then StreamBuilder will be chosen for the implicit. * we recognize that fact and optimize to get more laziness. */ - override def ++[B >: A, That](that: Traversable[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { + override def ++[B >: A, That](that: TraversableOnce[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { // we assume there is no other builder factory on streams and therefore know that That = Stream[A] (if (isEmpty) that.toStream else new Stream.Cons(head, (tail ++ that).asInstanceOf[Stream[A]])).asInstanceOf[That] } - /** Create a new stream which contains all elements of this stream - * followed by all elements of Iterator `that' + /** + * Create a new stream which contains all intermediate results of applying the operator + * to subsequent elements left to right. + * @note This works because the target type of the Builder That is a Stream. */ - override def++[B >: A, That](that: Iterator[B])(implicit bf: CanBuildFrom[Stream[A], B, That]): That = - this ++ that.toStream + override final def scanLeft[B, That](z: B)(op: (B, A) => B)(implicit bf: CanBuildFrom[Stream[A], B, That]): That = { + (if (this.isEmpty) Stream(z) + else new Stream.Cons(z, tail.scanLeft(op(z, head))(op).asInstanceOf[Stream[B]])).asInstanceOf[That] + } /** Returns the stream resulting from applying the given function * <code>f</code> to each element of this stream. 
@@ -344,9 +348,9 @@ self => /** Builds a new stream from this stream in which any duplicates (wrt to ==) removed. * Among duplicate elements, only the first one is retained in the result stream */ - override def removeDuplicates: Stream[A] = + override def distinct: Stream[A] = if (isEmpty) this - else new Stream.Cons(head, tail.filter(head !=).removeDuplicates) + else new Stream.Cons(head, tail.filter(head !=).distinct) /** Returns a new sequence of given length containing the elements of this sequence followed by zero * or more occurrences of given elements. @@ -420,12 +424,12 @@ object Stream extends SeqFactory[Stream] { import scala.collection.{Iterable, Seq, IndexedSeq} /** A builder for streams - * @note: This builder is lazy only in the sense that it does not go downs the spine - * of traversables that are added as a whole. If more laziness can be achieved, - * this builder should be bypassed. + * @note This builder is lazy only in the sense that it does not go downs the spine + * of traversables that are added as a whole. If more laziness can be achieved, + * this builder should be bypassed. 
*/ class StreamBuilder[A] extends scala.collection.mutable.LazyBuilder[A, Stream[A]] { - def result: Stream[A] = (for (xs <- parts.iterator; x <- xs.toIterable.iterator) yield x).toStream + def result: Stream[A] = parts.toStream flatMap (_.toStream) } object Empty extends Stream[Nothing] { diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index 500de352f6..5b5a627cfe 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -34,7 +34,7 @@ import StringLike._ /** * @since 2.8 */ -trait StringLike[+Repr] extends IndexedSeqLike[Char, Repr] with Ordered[String] { +trait StringLike[+Repr] extends IndexedSeqOptimized[Char, Repr] with Ordered[String] { self => /** Creates a string builder buffer as builder for this class */ @@ -263,7 +263,7 @@ self => * @param args the arguments used to instantiating the pattern. * @throws java.lang.IllegalArgumentException */ - def format(l: java.util.Locale, args: Any*): String = + def formatLocal(l: java.util.Locale, args: Any*): String = java.lang.String.format(l, toString, args map unwrapArg: _*) } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 1a3ed38e1c..79e1a6b00b 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -19,8 +19,7 @@ import mutable.{Builder, AddingBuilder} * * @since 1 */ -object TreeSet extends SortedSetFactory[TreeSet]{ - +object TreeSet extends SortedSetFactory[TreeSet] { implicit def implicitBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = newBuilder[A](ordering) override def newBuilder[A](implicit ordering: Ordering[A]): Builder[A, TreeSet[A]] = new AddingBuilder(empty[A](ordering)) @@ -28,7 +27,6 @@ object TreeSet extends SortedSetFactory[TreeSet]{ /** The empty set of this type */ def 
empty[A](implicit ordering: Ordering[A]) = new TreeSet[A] - } /** This class implements immutable sets using a tree. diff --git a/src/library/scala/collection/immutable/Vector.scala b/src/library/scala/collection/immutable/Vector.scala index 1326768090..6defe66d6f 100644 --- a/src/library/scala/collection/immutable/Vector.scala +++ b/src/library/scala/collection/immutable/Vector.scala @@ -19,24 +19,25 @@ import scala.collection.mutable.Builder object Vector extends SeqFactory[Vector] { - /*private[immutable]*/ val BF = new GenericCanBuildFrom[Nothing] { + private[immutable] val BF = new GenericCanBuildFrom[Nothing] { override def apply() = newBuilder[Nothing] } @inline implicit def canBuildFrom[A]: CanBuildFrom[Coll, A, Vector[A]] = BF.asInstanceOf[CanBuildFrom[Coll, A, Vector[A]]] def newBuilder[A]: Builder[A, Vector[A]] = new VectorBuilder[A] - /*private[immutable]*/ val NIL = new Vector[Nothing](0, 0, 0) + private[immutable] val NIL = new Vector[Nothing](0, 0, 0) @inline override def empty[A]: Vector[A] = NIL } -// TODO: most members are still public -> restrict access (caveat: private prevents inlining) +// in principle, most members should be private. 
however, access privileges must +// be carefully chosen to not prevent method inlining @serializable -final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends Seq[A] +final class Vector[+A](startIndex: Int, endIndex: Int, focus: Int) extends IndexedSeq[A] with GenericTraversableTemplate[A, Vector] - with SeqLike[A, Vector[A]] - with VectorPointer[A @uncheckedVariance] { + with IndexedSeqLike[A, Vector[A]] + with VectorPointer[A @uncheckedVariance] { self => override def companion: GenericCompanion[Vector] = Vector @@ -45,7 +46,7 @@ override def companion: GenericCompanion[Vector] = Vector //assert(focus >= 0, focus+"<0") //assert(focus <= endIndex, focus+">"+endIndex) - /*private*/ var dirty = false + private[immutable] var dirty = false def length = endIndex - startIndex @@ -60,20 +61,35 @@ override def companion: GenericCompanion[Vector] = Vector s } + + // can still be improved + override /*SeqLike*/ + def reverseIterator: Iterator[A] = new Iterator[A] { + private var i = self.length + def hasNext: Boolean = 0 < i + def next: A = + if (0 < i) { + i -= 1 + self(i) + } else Iterator.empty.next + } + + // TODO: reverse + // TODO: check performance of foreach/map etc. should override or not? // Ideally, clients will inline calls to map all the way down, including the iterator/builder methods. // In principle, escape analysis could even remove the iterator/builder allocations and do it // with local variables exclusively. But we're not quite there yet ... 
- @inline def foreach0[U](f: A => U): Unit = iterator.foreach0(f) - @inline def map0[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = { + @deprecated("this method is experimental and will be removed in a future release") + @inline def foreachFast[U](f: A => U): Unit = iterator.foreachFast(f) + @deprecated("this method is experimental and will be removed in a future release") + @inline def mapFast[B, That](f: A => B)(implicit bf: CanBuildFrom[Vector[A], B, That]): That = { val b = bf(repr) - foreach0(x => b += f(x)) + foreachFast(x => b += f(x)) b.result } - // TODO: reverse - // TODO: reverseIterator def apply(index: Int): A = { val idx = checkRangeConvert(index) @@ -108,41 +124,71 @@ override def companion: GenericCompanion[Vector] = Vector } override def take(n: Int): Vector[A] = { - if (n < 0) throw new IllegalArgumentException(n.toString) - if (startIndex + n < endIndex) { + if (n <= 0) + Vector.empty + else if (startIndex + n < endIndex) dropBack0(startIndex + n) - } else + else this } override def drop(n: Int): Vector[A] = { - if (n < 0) throw new IllegalArgumentException(n.toString) - if (startIndex + n < endIndex) { + if (n <= 0) + this + else if (startIndex + n < endIndex) dropFront0(startIndex + n) - } else + else Vector.empty } override def takeRight(n: Int): Vector[A] = { - if (n < 0) throw new IllegalArgumentException(n.toString) - if (endIndex - n > startIndex) { + if (n <= 0) + Vector.empty + else if (endIndex - n > startIndex) dropFront0(endIndex - n) - } else + else this } override def dropRight(n: Int): Vector[A] = { - if (n < 0) throw new IllegalArgumentException(n.toString) - if (endIndex - n > startIndex) { + if (n <= 0) + this + else if (endIndex - n > startIndex) dropBack0(endIndex - n) - } else + else Vector.empty } + override /*IterableLike*/ def head: A = { + if (isEmpty) throw new UnsupportedOperationException("empty.head") + apply(0) + } + + override /*TraversableLike*/ def tail: Vector[A] = { + if (isEmpty) 
throw new UnsupportedOperationException("empty.tail") + drop(1) + } + + override /*TraversableLike*/ def last: A = { + if (isEmpty) throw new UnsupportedOperationException("empty.last") + apply(length-1) + } + + override /*TraversableLike*/ def init: Vector[A] = { + if (isEmpty) throw new UnsupportedOperationException("empty.init") + dropRight(1) + } + + override /*IterableLike*/ def slice(from: Int, until: Int): Vector[A] = + take(until).drop(from) + + override /*IterableLike*/ def splitAt(n: Int): (Vector[A], Vector[A]) = (take(n), drop(n)) + + // semi-private api - def updateAt[B >: A](index: Int, elem: B): Vector[B] = { + private[immutable] def updateAt[B >: A](index: Int, elem: B): Vector[B] = { val idx = checkRangeConvert(index) val s = new Vector[B](startIndex, endIndex, idx) s.initFrom(this) @@ -153,7 +199,6 @@ override def companion: GenericCompanion[Vector] = Vector } - private def gotoPosWritable(oldIndex: Int, newIndex: Int, xor: Int) = if (dirty) { gotoPosWritable1(oldIndex, newIndex, xor) } else { @@ -168,7 +213,7 @@ override def companion: GenericCompanion[Vector] = Vector dirty = true } - def appendFront[B>:A](value: B): Vector[B] = { + private[immutable] def appendFront[B>:A](value: B): Vector[B] = { if (endIndex != startIndex) { var blockIndex = (startIndex - 1) & ~31 var lo = (startIndex - 1) & 31 @@ -263,7 +308,7 @@ override def companion: GenericCompanion[Vector] = Vector } } - def appendBack[B>:A](value: B): Vector[B] = { + private[immutable] def appendBack[B>:A](value: B): Vector[B] = { // //println("------- append " + value) // debug() if (endIndex != startIndex) { @@ -361,22 +406,22 @@ override def companion: GenericCompanion[Vector] = Vector display5 = copyRange(display5, oldLeft, newLeft) } - def zeroLeft(array: Array[AnyRef], index: Int): Unit = { + private def zeroLeft(array: Array[AnyRef], index: Int): Unit = { var i = 0; while (i < index) { array(i) = null; i+=1 } } - def zeroRight(array: Array[AnyRef], index: Int): Unit = { + private 
def zeroRight(array: Array[AnyRef], index: Int): Unit = { var i = index; while (i < array.length) { array(i) = null; i+=1 } } - def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = { + private def copyLeft(array: Array[AnyRef], right: Int): Array[AnyRef] = { // if (array eq null) // println("OUCH!!! " + right + "/" + depth + "/"+startIndex + "/" + endIndex + "/" + focus) val a2 = new Array[AnyRef](array.length) Platform.arraycopy(array, 0, a2, 0, right) a2 } - def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = { + private def copyRight(array: Array[AnyRef], left: Int): Array[AnyRef] = { val a2 = new Array[AnyRef](array.length) Platform.arraycopy(array, left, a2, left, a2.length - left) a2 @@ -592,18 +637,17 @@ final class VectorIterator[+A](_startIndex: Int, _endIndex: Int) extends Iterato res } - // TODO: take - // TODO: drop + // TODO: drop (important?) - // TODO: remove! - @inline def foreach0[U](f: A => U) { while (hasNext) f(next()) } + @deprecated("this method is experimental and will be removed in a future release") + @inline def foreachFast[U](f: A => U) { while (hasNext) f(next()) } } final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A @uncheckedVariance] { - // TODO: possible alternative: start with display0 = null, blockIndex = -32, lo = 32 - // to avoid allocation initial array if the result will be empty anyways + // possible alternative: start with display0 = null, blockIndex = -32, lo = 32 + // to avoid allocating initial array if the result will be empty anyways display0 = new Array[AnyRef](32) depth = 1 @@ -612,7 +656,7 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A private var lo = 0 def += (elem: A): this.type = { - if (lo == 32) { + if (lo >= display0.length) { val newBlockIndex = blockIndex+32 gotoNextBlockStartWritable(newBlockIndex, blockIndex ^ newBlockIndex) blockIndex = newBlockIndex @@ -624,11 +668,12 @@ final class VectorBuilder[A]() extends 
Builder[A,Vector[A]] with VectorPointer[A } def result: Vector[A] = { - if (blockIndex + lo == 0) + val size = blockIndex + lo + if (size == 0) return Vector.empty - val s = new Vector[A](0, blockIndex + lo, 0) // TODO: should focus front or back? + val s = new Vector[A](0, size, 0) // should focus front or back? s.initFrom(this) - if (depth > 1) s.gotoPos(0, blockIndex + lo) + if (depth > 1) s.gotoPos(0, size - 1) // we're currently focused to size - 1, not size! s } @@ -643,18 +688,18 @@ final class VectorBuilder[A]() extends Builder[A,Vector[A]] with VectorPointer[A private[immutable] trait VectorPointer[T] { - var depth: Int = _ - var display0: Array[AnyRef] = _ - var display1: Array[AnyRef] = _ - var display2: Array[AnyRef] = _ - var display3: Array[AnyRef] = _ - var display4: Array[AnyRef] = _ - var display5: Array[AnyRef] = _ + private[immutable] var depth: Int = _ + private[immutable] var display0: Array[AnyRef] = _ + private[immutable] var display1: Array[AnyRef] = _ + private[immutable] var display2: Array[AnyRef] = _ + private[immutable] var display3: Array[AnyRef] = _ + private[immutable] var display4: Array[AnyRef] = _ + private[immutable] var display5: Array[AnyRef] = _ // used - final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth) + private[immutable] final def initFrom[U](that: VectorPointer[U]): Unit = initFrom(that, that.depth) - final def initFrom[U](that: VectorPointer[U], depth: Int) = { + private[immutable] final def initFrom[U](that: VectorPointer[U], depth: Int) = { this.depth = depth (depth - 1) match { case -1 => @@ -690,7 +735,7 @@ private[immutable] trait VectorPointer[T] { // requires structure is at pos oldIndex = xor ^ index - final def getElem(index: Int, xor: Int): T = { + private[immutable] final def getElem(index: Int, xor: Int): T = { if (xor < (1 << 5)) { // level = 0 display0(index & 31).asInstanceOf[T] } else @@ -717,7 +762,7 @@ private[immutable] trait VectorPointer[T] { // go to specific position 
// requires structure is at pos oldIndex = xor ^ index, // ensures structure is at pos index - final def gotoPos(index: Int, xor: Int): Unit = { + private[immutable] final def gotoPos(index: Int, xor: Int): Unit = { if (xor < (1 << 5)) { // level = 0 (could maybe removed) } else if (xor < (1 << 10)) { // level = 1 @@ -754,7 +799,7 @@ private[immutable] trait VectorPointer[T] { // USED BY ITERATOR // xor: oldIndex ^ index - final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos + private[immutable] final def gotoNextBlockStart(index: Int, xor: Int): Unit = { // goto block start pos if (xor < (1 << 10)) { // level = 1 display0 = display1((index >> 5) & 31).asInstanceOf[Array[AnyRef]] } else @@ -787,7 +832,7 @@ private[immutable] trait VectorPointer[T] { // USED BY BUILDER // xor: oldIndex ^ index - final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos + private[immutable] final def gotoNextBlockStartWritable(index: Int, xor: Int): Unit = { // goto block start pos if (xor < (1 << 10)) { // level = 1 if (depth == 1) { display1 = new Array(32); display1(0) = display0; depth+=1} display0 = new Array(32) @@ -841,14 +886,15 @@ private[immutable] trait VectorPointer[T] { // STUFF BELOW USED BY APPEND / UPDATE - final def copyOf(a: Array[AnyRef]) = { + private[immutable] final def copyOf(a: Array[AnyRef]) = { //println("copy") + if (a eq null) println ("NULL") val b = new Array[AnyRef](a.length) Platform.arraycopy(a, 0, b, 0, a.length) b } - final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = { + private[immutable] final def nullSlotAndCopy(array: Array[AnyRef], index: Int) = { //println("copy and null") val x = array(index) array(index) = null @@ -860,7 +906,7 @@ private[immutable] trait VectorPointer[T] { // requires structure is at pos index // ensures structure is clean and at pos index and writable at all levels except 0 - final def stabilize(index: Int) = (depth - 1) match { + 
private[immutable] final def stabilize(index: Int) = (depth - 1) match { case 5 => display5 = copyOf(display5) display4 = copyOf(display4) @@ -901,16 +947,13 @@ private[immutable] trait VectorPointer[T] { - - - /// USED IN UPDATE AND APPEND BACK // prepare for writing at an existing position // requires structure is clean and at pos oldIndex = xor ^ newIndex, // ensures structure is dirty and at pos newIndex and writable at level 0 - final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match { + private[immutable] final def gotoPosWritable0(newIndex: Int, xor: Int): Unit = (depth - 1) match { case 5 => display5 = copyOf(display5) display4 = nullSlotAndCopy(display5, (newIndex >> 25) & 31).asInstanceOf[Array[AnyRef]] @@ -943,7 +986,7 @@ private[immutable] trait VectorPointer[T] { // requires structure is dirty and at pos oldIndex, // ensures structure is dirty and at pos newIndex and writable at level 0 - final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { + private[immutable] final def gotoPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { if (xor < (1 << 5)) { // level = 0 display0 = copyOf(display0) } else @@ -1009,7 +1052,7 @@ private[immutable] trait VectorPointer[T] { // USED IN DROP - final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = { + private[immutable] final def copyRange(array: Array[AnyRef], oldLeft: Int, newLeft: Int) = { val elems = new Array[AnyRef](32) Platform.arraycopy(array, oldLeft, elems, newLeft, 32 - math.max(newLeft,oldLeft)) elems @@ -1023,7 +1066,7 @@ private[immutable] trait VectorPointer[T] { // requires structure is clean and at pos oldIndex, // ensures structure is dirty and at pos newIndex and writable at level 0 - final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos + private[immutable] final def gotoFreshPosWritable0(oldIndex: Int, newIndex: Int, xor: Int): Unit = { // goto block start pos if (xor < (1 
<< 5)) { // level = 0 //println("XXX clean with low xor") } else @@ -1103,7 +1146,7 @@ private[immutable] trait VectorPointer[T] { // requires structure is dirty and at pos oldIndex, // ensures structure is dirty and at pos newIndex and writable at level 0 - final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { + private[immutable] final def gotoFreshPosWritable1(oldIndex: Int, newIndex: Int, xor: Int): Unit = { stabilize(oldIndex) gotoFreshPosWritable0(oldIndex, newIndex, xor) } @@ -1113,7 +1156,7 @@ private[immutable] trait VectorPointer[T] { // DEBUG STUFF - def debug(): Unit = { + private[immutable] def debug(): Unit = { return /* //println("DISPLAY 5: " + display5 + " ---> " + (if (display5 ne null) display5.map(x=> if (x eq null) "." else x + "->" +x.asInstanceOf[Array[AnyRef]].mkString("")).mkString(" ") else "null")) |