Diffstat (limited to 'src/library')
36 files changed, 1846 insertions, 482 deletions
diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 9cde20f1df..6b11371bec 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -138,8 +138,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] = if (hash == this.hash && key == this.key ) { - if (merger eq null) new HashMap1(key, hash, value, kv) - else new HashMap1(key, hash, value, merger(this.kv, kv)) + if (merger eq null) { + if(this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this + else new HashMap1(key, hash, value, kv) + } else new HashMap1(key, hash, value, merger(this.kv, kv)) } else { var thatindex = (hash >>> level) & 0x1f var thisindex = (this.hash >>> level) & 0x1f @@ -271,13 +273,15 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val mask = (1 << index) val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { - val elemsNew = new Array[HashMap[A,B1]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) val sub = elems(offset) // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) - elemsNew(offset) = subNew - new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + if(subNew eq sub) this else { + val elemsNew = new Array[HashMap[A,B1]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + elemsNew(offset) = subNew + new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + } } else { val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) Array.copy(elems, 0, elemsNew, 0, offset) @@ -295,7 +299,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val sub = elems(offset) // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.removed0(key, hash, level + 5) - if (subNew.isEmpty) { + if (subNew eq sub) this + else if (subNew.isEmpty) { val bitmapNew = bitmap ^ mask if (bitmapNew != 0) { val elemsNew = new Array[HashMap[A,B]](elems.length - 1) diff --git a/src/library/scala/collection/mutable/BasicNode.java b/src/library/scala/collection/mutable/BasicNode.java new file mode 100644 index 0000000000..b934aed24f --- /dev/null +++ b/src/library/scala/collection/mutable/BasicNode.java @@ -0,0 +1,20 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + + + + +abstract class BasicNode { + + public abstract String string(int lev); + +}
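The two HashMap hunks above apply the same structural-sharing rule at both levels of the trie: when an update or removal leaves a subtree reference-equal to the old one, the node returns `this` instead of copying the spine. A small, hypothetical REPL-style check of the observable effect (it assumes the re-inserted value is reference-equal to the stored one, e.g. an interned string literal):

import scala.collection.immutable.HashMap

val m = HashMap("a" -> "x", "b" -> "y")

// Re-inserting a binding whose value is the same reference now yields the
// original map: HashMap1 returns `this`, and HashTrieMap sees subNew eq sub.
val m2 = m.updated("a", "x")   // "x" is interned, so this.value eq value
println(m2 eq m)               // expected: true with this patch

// Removing an absent key likewise propagates `this` up the trie.
val m3 = m - "zzz"
println(m3 eq m)               // expected: true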
\ No newline at end of file diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala new file mode 100644 index 0000000000..6ed3a516c4 --- /dev/null +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -0,0 +1,1003 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package mutable + + + +import java.util.concurrent.atomic._ +import collection.immutable.{ ListMap => ImmutableListMap } +import collection.parallel.mutable.ParCtrie +import generic._ +import annotation.tailrec +import annotation.switch + + + +private[mutable] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen) = this(null, g) + + @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.RDCSS_READ_ROOT(true) + + prev match { + case null => + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. + // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. + // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + @inline private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen) + nin.WRITE(cn) + nin + } + + @inline final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = { + val nin = new INode[K, V](ngen) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! 
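// The match below dispatches on the three kinds of live main node:
//  - CNode: multiway branch; compute the 5-bit index for this level and
//    either descend into an INode, split a colliding SNode one level
//    deeper, or widen the bitmap with a fresh SNode
//    (e.g. hc = 0x2F57C3A1, lev = 5  =>  idx = (hc >>> 5) & 0x1f = 29);
//  - TNode: a tombstone left behind by a removal; clean the parent and
//    report failure so the caller retries;
//  - LNode: a hash-collision list at the bottom; functionally insert.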
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] => + if (sn.hc == hc && sn.k == k) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + GCAS(cn, nn, ct) + } + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + } + + /** Inserts a new key value pair, given that a specific condition is met. + * + * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v` + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] => cond match { + case null => + if (sn.hc == hc && sn.k == k) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && sn.k == k) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && sn.k == k) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv: V => + if (sn.hc == hc && sn.k == k && sn.v == otherv) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + } + } else cond match { + case null | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv: V => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case 
ln: LNode[K, V] => // 3) an l-node + @inline def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case null => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv: V => + ln.get(k) match { + case Some(v0) if v0 == otherv => if (insertln()) Some(otherv) else null + case _ => None + } + } + } + } + + /** Looks up the value associated with the key. + * + * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise + */ + @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) null // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else return RESTART // used to be throw RestartException + } + case sn: SNode[K, V] => // 2) singleton node + if (sn.hc == hc && sn.k == k) sn.v.asInstanceOf[AnyRef] + else null + } + } + case tn: TNode[K, V] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART // used to be throw RestartException + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else null + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].orNull + } + } + + /** Removes the key associated with the given value. + * + * @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! 
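// Removal happens in two phases, visible in the match below: first the
// binding is unlinked and the CNode contracted via toContracted; then, if
// this i-node was left holding a tombstone, cleanParent compresses the
// parent so that later lookups do not keep tripping over dead nodes.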
+ + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] => + if (sn.hc == hc && sn.k == k && (v == null || sn.v == v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef) { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) nonlive match { + case tn: TNode[K, V] => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.RDCSS_READ_ROOT().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (v == null) { + val optv = ln.get(k) + val nn = ln.removed(k) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if v0 == v => + val nn = ln.removed(k) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + } + } + + private def clean(nd: INode[K, V], ct: Ctrie[K, V], lev: Int) { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null + + /* this is a quiescent method! 
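   it reads `mainnode` without a GCAS read, so it must only be called when
   the trie is quiescent (e.g. on a read-only snapshot); see the same
   caveat on CNode.collectElems below.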
*/ + def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { + case null => "<null>" + case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) + case cn: CNode[_, _] => cn.string(lev) + case ln: LNode[_, _] => ln.string(lev) + case x => "<elem: %s>".format(x) + }) + +} + + +private[mutable] object INode { + val KEY_PRESENT = new AnyRef + val KEY_ABSENT = new AnyRef + + def newRootNode[K, V] = { + val gen = new Gen + val cn = new CNode[K, V](0, new Array(0), gen) + new INode[K, V](cn, gen) + } +} + + +private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { + WRITE_PREV(p) + + def string(lev: Int) = throw new UnsupportedOperationException + + override def toString = "FailedNode(%s)".format(p) +} + + +private[mutable] trait KVNode[K, V] { + def kvPair: (K, V) +} + + +private[mutable] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) +extends BasicNode with KVNode[K, V] { + final def copy = new SNode(k, v, hc) + final def copyTombed = new TNode(k, v, hc) + final def copyUntombed = new SNode(k, v, hc) + final def kvPair = (k, v) + final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) +} + + +private[mutable] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) +extends MainNode[K, V] with KVNode[K, V] { + final def copy = new TNode(k, v, hc) + final def copyTombed = new TNode(k, v, hc) + final def copyUntombed = new SNode(k, v, hc) + final def kvPair = (k, v) + final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) +} + + +private[mutable] final class LNode[K, V](final val listmap: ImmutableListMap[K, V]) +extends MainNode[K, V] { + def this(k: K, v: V) = this(ImmutableListMap(k -> v)) + def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2)) + def inserted(k: K, v: V) = new LNode(listmap + ((k, v))) + def removed(k: K): MainNode[K, V] = { + val updmap = listmap - k + if (updmap.size > 1) new LNode(updmap) + else { + val (k, v) = updmap.iterator.next + new TNode(k, v, Ctrie.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses + } + } + def get(k: K) = listmap.get(k) + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", ")) +} + + +private[mutable] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen) +extends MainNode[K, V] { + + final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + final def removedAt(pos: Int, flag: Int, gen: Gen) = { + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = nn + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that all the i-nodes below it are copied + * to the specified generation `ngen`. 
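+ * This is the lazy part of snapshotting: an operation that finds child
+ * i-nodes from an older generation renews them to its own start
+ * generation before retrying.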
+ */ + final def renewed(ngen: Gen, ct: Ctrie[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + final def toCompressed(ct: Ctrie[K, V], lev: Int, gen: Gen) = { + var bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] => + val inodemain = in.GCAS_READ(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, inodemain) + case sn: SNode[K, V] => + tmparray(i) = sn + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + /* quiescently consistent - don't call concurrently to anything involving a GCAS!! */ + protected def collectElems: Seq[(K, V)] = array flatMap { + case sn: SNode[K, V] => Some(sn.kvPair) + case in: INode[K, V] => in.mainnode match { + case tn: TNode[K, V] => Some(tn.kvPair) + case ln: LNode[K, V] => ln.listmap.toList + case cn: CNode[K, V] => cn.collectElems + } + } + + protected def collectLocalElems: Seq[String] = array flatMap { + case sn: SNode[K, V] => Some(sn.kvPair._2.toString) + case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")") + } + + override def toString = { + val elems = collectLocalElems + "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) + } +} + + +private[mutable] object CNode { + + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen)//(Ctrie.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v) + } + +} + + +private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +/** A concurrent hash-trie or Ctrie is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. 
It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. + * + * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf + * + * @author Aleksandar Prokopec + * @since 2.10 + */ +@SerialVersionUID(0L - 6402774413839597105L) +final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef]) +extends ConcurrentMap[K, V] + with MapLike[K, V, Ctrie[K, V]] + with CustomParallelizable[(K, V), ParCtrie[K, V]] + with Serializable +{ + import Ctrie.computeHash + + private var rootupdater = rtupd + @volatile var root = r + + def this() = this( + INode.newRootNode, + AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root") + ) + + /* internal methods */ + + private def writeObject(out: java.io.ObjectOutputStream) { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + out.writeObject(k) + out.writeObject(v) + } + out.writeObject(CtrieSerializationEnd) + } + + private def readObject(in: java.io.ObjectInputStream) { + root = INode.newRootNode + rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root") + + var obj: AnyRef = null + do { + obj = in.readObject() + if (obj != CtrieSerializationEnd) { + val k = obj.asInstanceOf[K] + val v = in.readObject().asInstanceOf[V] + update(k, v) + } + } while (obj != CtrieSerializationEnd) + } + + @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + + @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { + val r = /*READ*/root + r match { + case in: INode[K, V] => in + case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) + } + } + + @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { + val v = /*READ*/root + v match { + case in: INode[K, V] => in + case desc: RDCSS_Descriptor[K, V] => + val RDCSS_Descriptor(ov, exp, nv) = desc + if (abort) { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } else { + val oldmain = ov.GCAS_READ(this) + if (oldmain eq exp) { + if (CAS_ROOT(desc, nv)) { + desc.committed = true + nv + } else RDCSS_Complete(abort) + } else { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } + } + } + } + + private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { + val desc = RDCSS_Descriptor(ov, expectedmain, nv) + if (CAS_ROOT(ov, desc)) { + RDCSS_Complete(false) + /*READ*/desc.committed + } else false + } + + @tailrec private def inserthc(k: K, hc: Int, v: V) { + val r = RDCSS_READ_ROOT() + if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) + } + + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = { + val r = RDCSS_READ_ROOT() + + val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond) + else ret + } + + @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + val res = r.rec_lookup(k, hc, 0, null, r.gen, this) + if (res eq INodeBase.RESTART) lookuphc(k, hc) + else res + } + + /* slower: + //@tailrec + private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + try { + r.rec_lookup(k, hc, 0, null, r.gen, this) + } catch { + case RestartException => + lookuphc(k, hc) + } + } + */ + + @tailrec 
private def removehc(k: K, v: V, hc: Int): Option[V] = { + val r = RDCSS_READ_ROOT() + val res = r.rec_remove(k, v, hc, 0, null, r.gen, this) + if (res ne null) res + else removehc(k, v, hc) + } + + def string = RDCSS_READ_ROOT().string(0) + + /* public methods */ + + override def seq = this + + override def par = new ParCtrie(this) + + override def empty: Ctrie[K, V] = new Ctrie[K, V] + + @inline final def isReadOnly = rootupdater eq null + + @inline final def nonReadOnly = rootupdater ne null + + /** Returns a snapshot of this Ctrie. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this Ctrie are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * Ctrie is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ + @tailrec final def snapshot(): Ctrie[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.GCAS_READ(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater) + else snapshot() + } + + /** Returns a read-only snapshot of this Ctrie. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * of this Ctrie are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this Ctrie by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. + */ + @tailrec final def readOnlySnapshot(): collection.Map[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.GCAS_READ(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null) + else readOnlySnapshot() + } + + @tailrec final override def clear() { + val r = RDCSS_READ_ROOT() + if (!RDCSS_ROOT(r, r.GCAS_READ(this), INode.newRootNode[K, V])) clear() + } + + final def lookup(k: K): V = { + val hc = computeHash(k) + lookuphc(k, hc).asInstanceOf[V] + } + + final override def apply(k: K): V = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq null) throw new NoSuchElementException + else res.asInstanceOf[V] + } + + final def get(k: K): Option[V] = { + val hc = computeHash(k) + Option(lookuphc(k, hc)).asInstanceOf[Option[V]] + } + + override def put(key: K, value: V): Option[V] = { + val hc = computeHash(key) + insertifhc(key, hc, value, null) + } + + final override def update(k: K, v: V) { + val hc = computeHash(k) + inserthc(k, hc, v) + } + + final def +=(kv: (K, V)) = { + update(kv._1, kv._2) + this + } + + final override def remove(k: K): Option[V] = { + val hc = computeHash(k) + removehc(k, null.asInstanceOf[V], hc) + } + + final def -=(k: K) = { + remove(k) + this + } + + def putIfAbsent(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_ABSENT) + } + + def remove(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, hc).nonEmpty + } + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty + } + + def replace(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_PRESENT) + } + + def iterator: Iterator[(K, V)] = + if (nonReadOnly) readOnlySnapshot().iterator + else 
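// a read-only trie is already frozen, so it can be walked in place;
// a live trie first takes a read-only snapshot for a consistent view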
new CtrieIterator(0, this) + + override def stringPrefix = "Ctrie" + +} + + +object Ctrie extends MutableMapFactory[Ctrie] { + val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V] + + def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V] + + @inline final def computeHash[K](k: K): Int = { + var hcode = k.hashCode + hcode = hcode * 0x9e3775cd + hcode = java.lang.Integer.reverseBytes(hcode) + hcode * 0x9e3775cd + } + +} + + +private[collection] class CtrieIterator[K, V](var level: Int, ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { + var stack = new Array[Array[BasicNode]](7) + var stackpos = new Array[Int](7) + var depth = -1 + var subiter: Iterator[(K, V)] = null + var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.GCAS_READ(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.listmap.iterator + checkSubiter() + case null => + current = null + } + + @inline private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + @inline private def initialize() { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] => + current = sn + case in: INode[K, V] => + readin(in) + } + } else { + depth -= 1 + advance() + } + } else current = null + + protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit) + + /** Returns a sequence of iterators over subsets of this iterator. + * It's used to ease the implementation of splitters for a parallel version of the Ctrie. + */ + protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { + // the case where an LNode is being iterated + val it = subiter + subiter = null + advance() + this.level += 1 + Seq(it, this) + } else if (depth == -1) { + this.level += 1 + Seq(this) + } else { + var d = 0 + while (d <= depth) { + val rem = stack(d).length - 1 - stackpos(d) + if (rem > 0) { + val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) + stack(d) = arr1 + stackpos(d) = -1 + val it = newIterator(level + 1, ct, false) + it.stack(0) = arr2 + it.stackpos(0) = -1 + it.depth = 0 + it.advance() // <-- fix it + this.level += 1 + return Seq(this, it) + } + d += 1 + } + this.level += 1 + Seq(this) + } + + private def print { + println("ctrie iterator") + println(stackpos.mkString(",")) + println("depth: " + depth) + println("curr.: " + current) + println(stack.mkString("\n")) + } + +} + + +private[mutable] object RestartException extends util.control.ControlThrowable + + +/** Only used for ctrie serialization. 
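+ * It is written once after the last key/value pair by `writeObject`;
+ * `readObject` keeps reading pairs until it sees this marker.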
*/ +@SerialVersionUID(0L - 7237891413820527142L) +private[mutable] case object CtrieSerializationEnd + + +private[mutable] object Debug { + import collection._ + + lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef] + + def log(s: AnyRef) = logbuffer.add(s) + + def flush() { + for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString) + logbuffer.clear() + } + + def clear() { + logbuffer.clear() + } + +} + + + + + + + + + + diff --git a/src/library/scala/collection/mutable/Gen.java b/src/library/scala/collection/mutable/Gen.java new file mode 100644 index 0000000000..0c9a30d198 --- /dev/null +++ b/src/library/scala/collection/mutable/Gen.java @@ -0,0 +1,18 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + + + + +final class Gen { +} + diff --git a/src/library/scala/collection/mutable/INodeBase.java b/src/library/scala/collection/mutable/INodeBase.java new file mode 100644 index 0000000000..487b5cfc28 --- /dev/null +++ b/src/library/scala/collection/mutable/INodeBase.java @@ -0,0 +1,35 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + + + +abstract class INodeBase<K, V> extends BasicNode { + + public static final AtomicReferenceFieldUpdater<INodeBase, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode"); + + public static final Object RESTART = new Object(); + + public volatile MainNode<K, V> mainnode = null; + + public final Gen gen; + + public INodeBase(Gen generation) { + gen = generation; + } + + public BasicNode prev() { + return null; + } + +}
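Stepping back from the node-level machinery: the `cond` protocol documented on `rec_insertif` above is what backs Ctrie's entire `ConcurrentMap` write surface. A short usage sketch against the public API defined in this patch:

import scala.collection.mutable.Ctrie

val ct = Ctrie.empty[String, Int]

ct.update("a", 1)                 // cond = null: unconditional insert
println(ct.putIfAbsent("a", 2))   // cond = KEY_ABSENT:  Some(1), no write
println(ct.replace("a", 3))       // cond = KEY_PRESENT: Some(1), now bound to 3
println(ct.replace("a", 3, 4))    // cond = old value:   true, now bound to 4
println(ct.remove("a", 9))        // conditional remove: false, value is 4
println(ct.get("a"))              // Some(4)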
\ No newline at end of file diff --git a/src/library/scala/collection/mutable/MainNode.java b/src/library/scala/collection/mutable/MainNode.java new file mode 100644 index 0000000000..09bc858edc --- /dev/null +++ b/src/library/scala/collection/mutable/MainNode.java @@ -0,0 +1,36 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + + + +abstract class MainNode<K, V> extends BasicNode { + + public static final AtomicReferenceFieldUpdater<MainNode, MainNode> updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev"); + + public volatile MainNode<K, V> prev = null; + + public boolean CAS_PREV(MainNode<K, V> oldval, MainNode<K, V> nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_PREV(MainNode<K, V> nval) { + updater.set(this, nval); + } + + // do we need this? unclear in the javadocs... + public MainNode<K, V> READ_PREV() { + return updater.get(this); + } + +}
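MainNode's `prev` field is the other half of the GCAS protocol implemented in INode above: a freshly installed main node keeps `prev` pointing at the value it replaced until the write is either committed (`prev` nulled) or aborted (`prev` set to a FailedNode, then rolled back). A compressed, hypothetical model of the decision made by `GCAS_Complete` (illustrative names, not the library's API):

import java.util.concurrent.atomic.AtomicReference

object GcasSketch {
  sealed trait Prev
  case object Committed extends Prev                 // prev == null in the real code
  final case class Pending(old: Main) extends Prev   // write still provisional
  final case class Failed(old: Main) extends Prev    // write must be rolled back

  final class Main(p0: Prev) { val prev = new AtomicReference[Prev](p0) }

  // GCAS_Complete in miniature: commit a provisional write if the root
  // generation still matches (genOk); otherwise abort and roll `cell` back.
  def complete(cell: AtomicReference[Main], m: Main, genOk: Boolean): Main =
    m.prev.get match {
      case Committed   => m
      case Failed(old) => cell.compareAndSet(m, old); cell.get
      case p @ Pending(old) =>
        if (genOk) {
          if (m.prev.compareAndSet(p, Committed)) m
          else complete(cell, m, genOk)              // lost a race; re-examine
        } else {
          m.prev.compareAndSet(p, Failed(old))       // flag for rollback
          complete(cell, m, genOk)
        }
    }
}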
\ No newline at end of file diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index d1453c9ce9..e304be92ae 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -34,7 +34,6 @@ import scala.collection.generic.Sizing */ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { //self: EnvironmentPassingCombiner[Elem, To] => - private[collection] final val tasksupport = getTaskSupport /** Combines the contents of the receiver builder and the `other` builder, * producing a new builder containing both their elements. @@ -62,7 +61,14 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { * @return the parallel builder containing both the elements of this and the `other` builder */ def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] - + + /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared + * across several threads constructing the collection. + * + * By default, this method returns `false`. + */ + def canBeShared: Boolean = false + } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 390bd72ab5..7c5a835e56 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -96,17 +96,6 @@ import annotation.unchecked.uncheckedVariance * The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible * way to change between different collection types. * - * The method: - * - * {{{ - * def threshold(sz: Int, p: Int): Int - * }}} - * - * provides an estimate on the minimum number of elements the collection has before - * the splitting stops and depends on the number of elements in the collection. A rule of the - * thumb is the number of elements divided by 8 times the parallelism level. This method may - * be overridden in concrete implementations if necessary. - * * Since this trait extends the `Iterable` trait, methods like `size` must also * be implemented in concrete collections, while `iterator` forwards to `splitter` by * default. @@ -165,53 +154,17 @@ extends GenIterableLike[T, Repr] with HasNewCombiner[T, Repr] { self: ParIterableLike[T, Repr, Sequential] => - + import tasksupport._ - + def seq: Sequential def repr: Repr = this.asInstanceOf[Repr] - /** Parallel iterators are split iterators that have additional accessor and - * transformer methods defined in terms of methods `next` and `hasNext`. - * When creating a new parallel collection, one might want to override these - * new methods to make them more efficient. - * - * Parallel iterators are augmented with signalling capabilities. This means - * that a signalling object can be assigned to them as needed. - * - * The self-type ensures that signal context passing behaviour gets mixed in - * a concrete object instance. - */ - trait ParIterator extends IterableSplitter[T] { - me: SignalContextPassingIterator[ParIterator] => - var signalDelegate: Signalling = IdleSignalling - def repr = self.repr - def split: Seq[IterableSplitter[T]] - } - - /** A stackable modification that ensures signal contexts get passed along the iterators. - * A self-type requirement in `ParIterator` ensures that this trait gets mixed into - * concrete iterators. 
- */ - trait SignalContextPassingIterator[+IterRepr <: ParIterator] extends ParIterator { - // Note: This functionality must be factored out to this inner trait to avoid boilerplate. - // Also, one could omit the cast below. However, this leads to return type inconsistencies, - // due to inability to override the return type of _abstract overrides_. - // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid - // on the type of iterators it splits. - // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here. - abstract override def split: Seq[IterRepr] = { - val pits = super.split - pits foreach { _.signalDelegate = signalDelegate } - pits.asInstanceOf[Seq[IterRepr]] - } - } - def hasDefiniteSize = true def nonEmpty = size != 0 - + /** Creates a new parallel iterator used to traverse the elements of this parallel collection. * This iterator is more specific than the iterator of the returned by `iterator`, and augmented * with additional accessor and transformer methods. @@ -242,18 +195,6 @@ self: ParIterableLike[T, Repr, Sequential] => */ def isStrictSplitterCollection = true - /** Some minimal number of elements after which this collection should be handled - * sequentially by different processors. - * - * This method depends on the size of the collection and the parallelism level, which - * are both specified as arguments. - * - * @param sz the size based on which to compute the threshold - * @param p the parallelism level based on which to compute the threshold - * @return the maximum number of elements for performing operations sequentially - */ - def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p) - /** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool. * This method forwards the call to `newCombiner`. */ @@ -293,7 +234,7 @@ self: ParIterableLike[T, Repr, Sequential] => trait SignallingOps[PI <: DelegatedSignalling] { def assign(cntx: Signalling): PI } - + /* convenience task operations wrapper */ protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] { def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) { @@ -321,7 +262,7 @@ self: ParIterableLike[T, Repr, Sequential] => it } } - + protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] { def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] { def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) { @@ -331,12 +272,12 @@ self: ParIterableLike[T, Repr, Sequential] => def isCombiner = cb.isInstanceOf[Combiner[_, _]] def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] } - + protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass` def apply() = bf.apply() } - + protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr] def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end) @@ -346,7 +287,7 @@ self: ParIterableLike[T, Repr, Sequential] => def mkString: String = seq.mkString("") override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - + def canEqual(other: Any) = true /** Reduces the elements of this sequence using the specified associative binary operator. 
@@ -383,7 +324,7 @@ self: ParIterableLike[T, Repr, Sequential] => * the elements if the collection is nonempty, and `None` otherwise. */ def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op)) - + /** Folds the elements of this sequence using the specified associative binary operator. * The order in which the elements are reduced is unspecified and may be nondeterministic. * @@ -434,15 +375,11 @@ self: ParIterableLike[T, Repr, Sequential] => def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = { executeAndWaitResult(new Aggregate(z, seqop, combop, splitter)) } - - def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op) - - def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op) - + def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op) - + def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op) - + def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op) def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op) @@ -451,20 +388,6 @@ self: ParIterableLike[T, Repr, Sequential] => def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op) - /* - /** Applies a function `f` to all the elements of $coll. Does so in a nondefined order, - * and in parallel. - * - * $undefinedorder - * - * @tparam U the result type of the function applied to each element, which is always discarded - * @param f function applied to each element - */ - def pareach[U](f: T => U): Unit = { - executeAndWaitResult(new Foreach(f, splitter)) - } - */ - /** Applies a function `f` to all the elements of $coll in a sequential order. * * @tparam U the result type of the function applied to each element, which is always discarded @@ -505,23 +428,23 @@ self: ParIterableLike[T, Repr, Sequential] => reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) } - + def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new Map[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.map(f)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) } otherwise seq.map(f)(bf2seq(bf))*/ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new Collect[S, That](pf, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.collect(pf)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) } otherwise seq.collect(pf)(bf2seq(bf))*/ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new FlatMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.flatMap(f)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) @@ -563,17 +486,48 @@ self: ParIterableLike[T, Repr, Sequential] => def find(pred: T => Boolean): Option[T] = { executeAndWaitResult(new 
Find(pred, splitter assign new DefaultSignalling with VolatileAbort)) } - - protected[this] def cbfactory ={ - () => newCombiner + + /** Creates a combiner factory. Each combiner factory instance is used + * once per invocation of a parallel transformer method for a single + * collection. + * + * The default combiner factory creates a new combiner every time it + * is requested, unless the combiner is thread-safe as indicated by its + * `canBeShared` method. In this case, the method returns a factory which + * returns the same combiner each time. This is typically done for + * concurrent parallel collections, the combiners of which allow + * thread safe access. + */ + protected[this] def combinerFactory = { + val combiner = newCombiner + if (combiner.canBeShared) new CombinerFactory[T, Repr] { + val shared = combiner + def apply() = shared + def doesShareCombiners = true + } else new CombinerFactory[T, Repr] { + def apply() = newCombiner + def doesShareCombiners = false + } } - + + protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = { + val combiner = cbf() + if (combiner.canBeShared) new CombinerFactory[S, That] { + val shared = combiner + def apply() = shared + def doesShareCombiners = true + } else new CombinerFactory[S, That] { + def apply() = cbf() + def doesShareCombiners = false + } + } + def filter(pred: T => Boolean): Repr = { - executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result }) + executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.result }) } def filterNot(pred: T => Boolean): Repr = { - executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result }) + executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.result }) } def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { @@ -581,9 +535,10 @@ self: ParIterableLike[T, Repr, Sequential] => // println("case both are parallel") val other = that.asParIterable val pbf = bf.asParallel - val copythis = new Copy(() => pbf(repr), splitter) + val cfactory = combinerFactory(() => pbf(repr)) + val copythis = new Copy(cfactory, splitter) val copythat = wrap { - val othtask = new other.Copy(() => pbf(self.repr), other.splitter) + val othtask = new other.Copy(cfactory, other.splitter) tasksupport.executeAndWaitResult(othtask) } val task = (copythis parallel copythat) { _ combine _ } mapResult { @@ -593,7 +548,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else if (bf.isParallel) { // println("case parallel builder, `that` not parallel") val pbf = bf.asParallel - val copythis = new Copy(() => pbf(repr), splitter) + val copythis = new Copy(combinerFactory(() => pbf(repr)), splitter) val copythat = wrap { val cb = pbf(repr) for (elem <- that.seq) cb += elem @@ -610,19 +565,19 @@ self: ParIterableLike[T, Repr, Sequential] => } def partition(pred: T => Boolean): (Repr, Repr) = { - executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) }) + executeAndWaitResult(new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) }) } def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult { - rcb => rcb.groupByKey(cbfactory) + rcb => rcb.groupByKey(() => combinerFactory()) }) } def take(n: Int): Repr = { val actualn = if (size > n) n else size if (actualn < MIN_FOR_COPY) take_sequential(actualn) 
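// From here on, the collection-copying tasks (Take, Drop, Slice, SplitAt)
// receive a CombinerFactory instead of a raw () => Combiner, so a shared,
// thread-safe combiner is created once per operation rather than per task.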
- else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult { + else executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult { _.result }) } @@ -642,7 +597,7 @@ self: ParIterableLike[T, Repr, Sequential] => def drop(n: Int): Repr = { val actualn = if (size > n) n else size if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn) - else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result }) + else executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.result }) } private def drop_sequential(n: Int) = { @@ -657,7 +612,7 @@ self: ParIterableLike[T, Repr, Sequential] => val from = unc_from min size max 0 val until = unc_until min size max from if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) - else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result }) + else executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.result }) } private def slice_sequential(from: Int, until: Int): Repr = { @@ -672,7 +627,7 @@ self: ParIterableLike[T, Repr, Sequential] => } def splitAt(n: Int): (Repr, Repr) = { - executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) }) + executeAndWaitResult(new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) }) } /** Computes a prefix scan of the elements of the collection. @@ -694,7 +649,7 @@ self: ParIterableLike[T, Repr, Sequential] => val cbf = bf.asParallel if (parallelismLevel > 1) { if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult { + tree => executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => cbf(repr))) mapResult { cb => cb.result }) }) else (cbf(self.repr) += z).result @@ -714,9 +669,15 @@ self: ParIterableLike[T, Repr, Sequential] => * @return the longest prefix of this $coll of elements that satisy the predicate `pred` */ def takeWhile(pred: T => Boolean): Repr = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result }) + val cbf = combinerFactory + if (cbf.doesShareCombiners) { + val parseqspan = toSeq.takeWhile(pred) + executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult { _.result }) + } else { + val cntx = new DefaultSignalling with AtomicIndexFlag + cntx.setIndexFlag(Int.MaxValue) + executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult { _._1.result }) + } } /** Splits this $coll into a prefix/suffix pair according to a predicate. 
@@ -729,11 +690,22 @@ self: ParIterableLike[T, Repr, Sequential] => * the elements satisfy `pred`, and the rest of the collection */ def span(pred: T => Boolean): (Repr, Repr) = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { - p => (p._1.result, p._2.result) - }) + val cbf = combinerFactory + if (cbf.doesShareCombiners) { + val (xs, ys) = toSeq.span(pred) + val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.result } + val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.result } + val copyall = (copyxs parallel copyys) { + (xr, yr) => (xr, yr) + } + executeAndWaitResult(copyall) + } else { + val cntx = new DefaultSignalling with AtomicIndexFlag + cntx.setIndexFlag(Int.MaxValue) + executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { + p => (p._1.result, p._2.result) + }) + } } /** Drops all elements in the longest prefix of elements that satisfy the predicate, @@ -749,7 +721,7 @@ self: ParIterableLike[T, Repr, Sequential] => def dropWhile(pred: T => Boolean): Repr = { val cntx = new DefaultSignalling with AtomicIndexFlag cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result }) + executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { _._2.result }) } def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) @@ -765,7 +737,7 @@ self: ParIterableLike[T, Repr, Sequential] => def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) { val pbf = bf.asParallel val thatseq = that.asParSeq - executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result }); + executeAndWaitResult(new Zip(combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result }); } else seq.zip(that)(bf2seq(bf)) def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false) @@ -773,15 +745,15 @@ self: ParIterableLike[T, Repr, Sequential] => def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) { val pbf = bf.asParallel val thatseq = that.asParSeq - executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result }); + executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result }); } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)) protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { - executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result }); + executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.result }); } protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = { - executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result }) + executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.result }) } def view = new ParIterableView[T, Repr, Sequential] { @@ -838,8 +810,8 @@ self: ParIterableLike[T, Repr, Sequential] => extends 
StrictSplitterCheckTask[R, Tp] { protected[this] val pit: IterableSplitter[T] protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] - def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel) - def split = pit.split.map(newSubtask(_)) // default split procedure + def shouldSplitFurther = pit.shouldSplitFurther(self.repr, parallelismLevel) + def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure private[parallel] override def signalAbort = pit.abort override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" } @@ -869,7 +841,7 @@ self: ParIterableLike[T, Repr, Sequential] => /** Sequentially performs one task after another. */ protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) + (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { executeAndWaitResult(ft) @@ -880,7 +852,7 @@ self: ParIterableLike[T, Repr, Sequential] => /** Performs two tasks in parallel, and waits for both to finish. */ protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) + (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { val ftfuture = execute(ft) @@ -903,16 +875,18 @@ self: ParIterableLike[T, Repr, Sequential] => } override def requiresStrictSplitters = inner.requiresStrictSplitters } - + protected trait Transformer[R, Tp] extends Accessor[R, Tp] - - protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] { + + protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Unit, Foreach[S]] { @volatile var result: Unit = () def leaf(prevr: Option[Unit]) = pit.foreach(op) protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p) } - protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] { + protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Int, Count] { // val pittxt = pit.toString @volatile var result: Int = 0 def leaf(prevr: Option[Int]) = result = pit.count(pred) @@ -920,8 +894,9 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Count) = result = result + that.result // override def toString = "CountTask(" + pittxt + ")" } - - protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] { + + protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Reduce[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p) @@ -931,7 +906,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] { + protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, 
Fold[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.fold(z)(op) protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p) @@ -946,21 +922,24 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Aggregate[S]) = result = combop(result, that.result) } - protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] { + protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, Sum[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.sum(num) protected[this] def newSubtask(p: IterableSplitter[T]) = new Sum(num, p) override def merge(that: Sum[U]) = result = num.plus(result, that.result) } - protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] { + protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, Product[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.product(num) protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p) override def merge(that: Product[U]) = result = num.times(result, that.result) } - protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] { + protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Min[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p) @@ -970,7 +949,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] { + protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Max[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p) @@ -980,16 +960,16 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Map[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], Map[S, That]] { @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p) + def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf())) + protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p) override def merge(that: Map[S, That]) = result = result combine that.result } protected[this] class Collect[S, That] - (pf: PartialFunction[T, S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], 
protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], Collect[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf()) @@ -998,7 +978,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], FlatMap[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = pit.flatmap2combiner(f, pbf()) @@ -1010,28 +990,31 @@ self: ParIterableLike[T, Repr, Sequential] => } } - protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] { + protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Boolean, Forall] { @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) override def merge(that: Forall) = result = result && that.result } - protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] { + protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Boolean, Exists] { @volatile var result: Boolean = false def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) override def merge(that: Exists) = result = result || that.result } - protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] { + protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Find[U]] { @volatile var result: Option[U] = None def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) override def merge(that: Find[U]) = if (this.result == None) result = that.result } - protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Filter[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1041,7 +1024,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Filter[U, This]) = result = result combine that.result } - protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], FilterNot[U, This]] { @volatile var 
result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1051,7 +1034,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: FilterNot[U, This]) = result = result combine that.result } - protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T]) + protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, That], Copy[U, That]] { @volatile var result: Combiner[U, That] = null def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory())) @@ -1059,11 +1042,12 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Copy[U, That]) = result = result combine that.result } - protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Partition[U >: T, This >: Repr] + (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] { @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p) + def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse())) + protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p) override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) } @@ -1090,7 +1074,8 @@ self: ParIterableLike[T, Repr, Sequential] => } } - protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Take[U >: T, This >: Repr] + (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Take[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1098,7 +1083,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, untilp) <- pits zip sizes; if untilp <= n) yield { if (untilp + p.remaining < n) new Take(p.remaining, cbf, p) @@ -1109,13 +1094,14 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Drop[U >: T, This >: Repr] + (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Drop[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf())) protected[this] def newSubtask(p: IterableSplitter[T]) = 
throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield { if (withp - p.remaining > n) new Drop(0, cbf, p) @@ -1126,13 +1112,14 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Slice[U >: T, This >: Repr] + (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Slice[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield { val f = (from max untilp) - untilp @@ -1144,22 +1131,23 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class SplitAt[U >: T, This >: Repr] + (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] { @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf())) + def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p) + for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p) } override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) override def requiresStrictSplitters = true } protected[this] class TakeWhile[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] { @volatile var result: (Combiner[U, This], Boolean) = null def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) { @@ -1168,7 +1156,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else result = (reuse(prev.map(_._1), cbf()), false) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override 
def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p) } override def merge(that: TakeWhile[U, This]) = if (result._2) { @@ -1178,23 +1166,23 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class Span[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] { @volatile var result: (Combiner[U, This], Combiner[U, This]) = null def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) { // val lst = pit.toList // val pa = mutable.ParArray(lst: _*) // val str = "At leaf we will iterate: " + pa.splitter.toList - result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised + result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised // println("\nAt leaf result is: " + result) if (result._2.size > 0) pit.setIndexFlagIfLesser(pos) } else { - result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf()))) + result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter()))) } protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p) + val pits = pit.splitWithSignalling + for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p) } override def merge(that: Span[U, This]) = result = if (result._2.size == 0) { (result._1 combine that.result._1, that.result._2) @@ -1204,15 +1192,15 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) + protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr)) + def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.map(_.remaining) - val opits = othpit.psplit(sizes: _*) + val opits = othpit.psplitWithSignalling(sizes: _*) (pits zip opits) map { p => new Zip(pbf, p._1, p._2) } } override def merge(that: Zip[U, S, That]) = result = result combine that.result @@ -1220,18 +1208,18 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class ZipAll[U >: T, S, That] - (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) + (len: Int, thiselem: U, 
thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] { @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr)) + def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = if (pit.remaining <= len) { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.map(_.remaining) - val opits = othpit.psplit(sizes: _*) + val opits = othpit.psplitWithSignalling(sizes: _*) ((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) } } else { - val opits = othpit.psplit(pit.remaining) + val opits = othpit.psplitWithSignalling(pit.remaining) val diff = len - pit.remaining Seq( new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed @@ -1248,7 +1236,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield { val plen = p.remaining min (len - untilp) new CopyToArray[U, This](from + untilp, plen, array, p) @@ -1257,7 +1245,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T]) + protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, That], ToParCollection[U, That]] { @volatile var result: Result = null def leaf(prev: Option[Combiner[U, That]]) { @@ -1268,7 +1256,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: ToParCollection[U, That]) = result = result combine that.result } - protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) + protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] { @volatile var result: Result = null def leaf(prev: Option[Combiner[(K, V), That]]) { @@ -1305,7 +1293,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else trees(from) protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield { new CreateScanTree(untilp, p.remaining, z, op, p) } @@ -1315,13 +1303,13 @@ self: ParIterableLike[T, Repr, Sequential] => } else result = that.result override def requiresStrictSplitters = true } - + protected[this] class FromScanTree[U >: T, That] - (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CanCombineFrom[Repr, U, That]) + (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That]) extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, 
That]] { @volatile var result: Combiner[U, That] = null def leaf(prev: Option[Combiner[U, That]]) { - val cb = reuse(prev, cbf(self.repr)) + val cb = reuse(prev, cbf()) iterate(tree, cb) result = cb } @@ -1351,7 +1339,7 @@ self: ParIterableLike[T, Repr, Sequential] => /* scan tree */ - protected[this] def scanBlockSize = (threshold(size, parallelismLevel) / 2) max 1 + protected[this] def scanBlockSize = (thresholdFromSize(size, parallelismLevel) / 2) max 1 protected[this] trait ScanTree[U >: T] { def beginsAt: Int @@ -1391,7 +1379,13 @@ self: ParIterableLike[T, Repr, Sequential] => def rightmost = this def print(depth: Int) = println((" " * depth) + this) } - + + /* alias methods */ + + def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op); + + def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op); + /* debug information */ private[parallel] def debugInformation = "Parallel collection: " + this.getClass diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index beb50a41e1..afd1f30903 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -66,7 +66,6 @@ self => new IterableSplitter[K] { i => val iter = s - var signalDelegate: Signalling = IdleSignalling def hasNext = iter.hasNext def next() = iter.next._1 def split = { @@ -84,7 +83,6 @@ self => new IterableSplitter[V] { i => val iter = s - var signalDelegate: Signalling = IdleSignalling def hasNext = iter.hasNext def next() = iter.next._2 def split = { diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index d0f38b30dc..6a5ee5c69b 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -48,35 +48,6 @@ self => type SuperParIterator = IterableSplitter[T] - /** An iterator that can be split into arbitrary subsets of iterators. - * The self-type requirement ensures that the signal context passing behaviour gets mixed in - * the concrete iterator instance in some concrete collection. - * - * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator - * `reverse2builder` method to ensure higher efficiency. - */ - trait ParIterator extends SeqSplitter[T] with super.ParIterator { - me: SignalContextPassingIterator[ParIterator] => - def split: Seq[ParIterator] - def psplit(sizes: Int*): Seq[ParIterator] - } - - /** A stackable modification that ensures signal contexts get passed along the iterators. - * A self-type requirement in `ParIterator` ensures that this trait gets mixed into - * concrete iterators. - */ - trait SignalContextPassingIterator[+IterRepr <: ParIterator] - extends ParIterator with super.SignalContextPassingIterator[IterRepr] { - // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator` - // to understand why we do the cast here, and have a type parameter. - // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications. - abstract override def psplit(sizes: Int*): Seq[IterRepr] = { - val pits = super.psplit(sizes: _*) - pits foreach { _.signalDelegate = signalDelegate } - pits.asInstanceOf[Seq[IterRepr]] - } - } - /** A more refined version of the iterator found in the `ParallelIterable` trait, * this iterator can be split into arbitrary subsets of iterators. 
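 *
 * For example (an illustrative sketch, not from this source), a caller can
 * request exact part sizes, which plain `split` does not guarantee:
 * {{{
 * val pits = splitter.psplit(from, length - from)  // two parts, exact sizes
 * }}}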
* @@ -89,9 +60,7 @@ self => override def size = length /** Used to iterate elements using indices */ - protected abstract class Elements(start: Int, val end: Int) extends ParIterator with BufferedIterator[T] { - me: SignalContextPassingIterator[ParIterator] => - + protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { private var i = start def hasNext = i < end @@ -106,14 +75,14 @@ self => final def remaining = end - i - def dup = new Elements(i, end) with SignalContextPassingIterator[ParIterator] + def dup = new Elements(i, end) {} def split = psplit(remaining / 2, remaining - remaining / 2) def psplit(sizes: Int*) = { val incr = sizes.scanLeft(0)(_ + _) for ((from, until) <- incr.init zip incr.tail) yield { - new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParIterator] + new Elements(start + from, (start + until) min end) {} } } @@ -138,7 +107,7 @@ self => val realfrom = if (from < 0) 0 else from val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1 + executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 } /** Finds the first element satisfying some predicate. @@ -156,7 +125,7 @@ self => val realfrom = if (from < 0) 0 else from val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx)) + executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) } /** Finds the last element satisfying some predicate. @@ -174,7 +143,7 @@ self => val until = if (end >= length) length else end + 1 val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MinValue) - executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx)) + executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) } def reverse: Repr = { @@ -203,7 +172,7 @@ self => else if (pthat.length > length - offset) false else { val ctx = new DefaultSignalling with VolatileAbort - executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter)) + executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter)) } } otherwise seq.startsWith(that, offset) @@ -213,9 +182,9 @@ self => } otherwise seq.sameElements(that) /** Tests whether this $coll ends with the given parallel sequence. 
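 * (As the implementation below shows, the receiver's splitter is split at
 * `length - that.length` and the suffix is compared element-by-element via
 * `SameElements`, aborting early through the shared `VolatileAbort` context
 * on the first mismatch.)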
- * + * * $abortsignalling - * + * * @tparam S the type of the elements of `that` sequence * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise @@ -226,7 +195,7 @@ self => else { val ctx = new DefaultSignalling with VolatileAbort val tlen = that.length - executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter)) + executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) } } otherwise seq.endsWith(that) @@ -235,13 +204,14 @@ self => if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) { val that = patch.asParSeq val pbf = bf.asParallel - val pits = splitter.psplit(from, replaced, length - from - realreplaced) - val copystart = new Copy[U, That](() => pbf(repr), pits(0)) + val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) + val cfactory = combinerFactory(() => pbf(repr)) + val copystart = new Copy[U, That](cfactory, pits(0)) val copymiddle = wrap { - val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter) + val tsk = new that.Copy[U, That](cfactory, that.splitter) tasksupport.executeAndWaitResult(tsk) } - val copyend = new Copy[U, That](() => pbf(repr), pits(2)) + val copyend = new Copy[U, That](cfactory, pits(2)) executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { _.result }) @@ -252,7 +222,7 @@ self => val from = 0 max fromarg val b = bf(repr) val repl = (r min (length - from)) max 0 - val pits = splitter.psplit(from, repl, length - from - repl) + val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) b ++= pits(0) b ++= patch b ++= pits(2) @@ -372,7 +342,7 @@ self => } else result = (0, false) protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) } override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) @@ -391,7 +361,7 @@ self => } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) } override def merge(that: IndexWhere) = result = if (result == -1) that.result else { @@ -412,7 +382,7 @@ self => } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) } override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { @@ -437,7 +407,7 @@ self => override def merge(that: ReverseMap[S, That]) = result = that.result combine result } - protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U]) + protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) extends Accessor[Boolean, SameElements[U]] { @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { @@ -448,7 +418,7 @@ self => override def split = { val fp = 
pit.remaining / 2 val sp = pit.remaining - fp - for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op) + for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) } override def merge(that: SameElements[U]) = result = result && that.result override def requiresStrictSplitters = true @@ -460,7 +430,7 @@ self => def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) } override def merge(that: Updated[U, That]) = result = result combine that.result @@ -475,8 +445,8 @@ self => override def split = { val fp = len / 2 val sp = len - len / 2 - val pits = pit.psplit(fp, sp) - val opits = otherpit.psplit(fp, sp) + val pits = pit.psplitWithSignalling(fp, sp) + val opits = otherpit.psplitWithSignalling(fp, sp) Seq( new Zip(fp, pbf, pits(0), opits(0)), new Zip(sp, pbf, pits(1), opits(1)) @@ -485,7 +455,7 @@ self => override def merge(that: Zip[U, S, That]) = result = result combine that.result } - protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S]) + protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) extends Accessor[Boolean, Corresponds[S]] { @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { @@ -496,7 +466,7 @@ self => override def split = { val fp = pit.remaining / 2 val sp = pit.remaining - fp - for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op) + for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op) } override def merge(that: Corresponds[S]) = result = result && that.result override def requiresStrictSplitters = true diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index e04e0e9c72..8ed4583419 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -14,6 +14,7 @@ package scala.collection.parallel import scala.collection.Parallel import scala.collection.generic.Signalling import scala.collection.generic.DelegatedSignalling +import scala.collection.generic.IdleSignalling import scala.collection.generic.CanCombineFrom import scala.collection.mutable.Builder import scala.collection.Iterator.empty @@ -27,6 +28,11 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] { * This method doesn't change the state of the iterator. */ def remaining: Int + + /** For most collections, this is a cheap operation. + * Exceptions can override this method. 
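 * Iterators for which computing `remaining` would force extra work (for
 * example, the `ParCtrieSplitter` later in this diff) return `false`, and
 * the `sizeHint` calls guarded by this flag are then simply skipped instead
 * of paying for an expensive size computation.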
+ */ + def isRemainingCheap = true } @@ -111,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) while (hasNext) cb += f(next) cb } @@ -136,7 +142,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = { - b.sizeHint(remaining) + if (isRemainingCheap) b.sizeHint(remaining) while (hasNext) b += next b } @@ -178,7 +184,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { drop(n) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) while (hasNext) cb += next cb } @@ -196,7 +202,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = { before.sizeHint(at) - after.sizeHint(remaining - at) + if (isRemainingCheap) after.sizeHint(remaining - at) var left = at while (left > 0) { before += next @@ -222,7 +228,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ val curr = next if (p(curr)) before += curr else { - after.sizeHint(remaining + 1) + if (isRemainingCheap) after.sizeHint(remaining + 1) after += curr isBefore = false } @@ -262,7 +268,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - cb.sizeHint(remaining min otherpit.remaining) + if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) while (hasNext && otherpit.hasNext) { cb += ((next, otherpit.next)) } @@ -270,7 +276,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - cb.sizeHint(remaining max that.remaining) + if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) while (this.hasNext && that.hasNext) cb += ((this.next, that.next)) while (this.hasNext) cb += ((this.next, thatelem)) while (that.hasNext) cb += ((thiselem, that.next)) @@ -329,7 +335,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter /* transformers */ def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[T]() while (hasNext) lst ::= next while (lst != Nil) { @@ -341,7 +347,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = cbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[S]() while (hasNext) lst ::= f(next) while (lst != Nil) { @@ -353,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = { //val cb = cbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var j = 0 
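    // j counts elements as they are copied into the combiner; when it
    // reaches `index`, the replacement element is emitted instead of the
    // original one.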
while (hasNext) { if (j == index) { @@ -380,12 +386,22 @@ extends AugmentedIterableIterator[T] with DelegatedSignalling { self => - + + var signalDelegate: Signalling = IdleSignalling + /** Creates a copy of this iterator. */ def dup: IterableSplitter[T] def split: Seq[IterableSplitter[T]] - + + def splitWithSignalling: Seq[IterableSplitter[T]] = { + val pits = split + pits foreach { _.signalDelegate = signalDelegate } + pits + } + + def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel) + /** The number of elements this iterator has yet to traverse. This method * doesn't change the state of the iterator. * @@ -421,7 +437,6 @@ self => /* iterator transformers */ class Taken(taken: Int) extends IterableSplitter[T] { - var signalDelegate = self.signalDelegate var remaining = taken min self.remaining def hasNext = remaining > 0 def next = { remaining -= 1; self.next } @@ -450,7 +465,7 @@ self => override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1) class Mapped[S](f: T => S) extends IterableSplitter[S] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext def next = f(self.next) def remaining = self.remaining @@ -461,7 +476,7 @@ self => override def map[S](f: T => S) = new Mapped(f) class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate protected var curr: IterableSplitter[U] = self def hasNext = if (curr.hasNext) true else if (curr eq self) { curr = that @@ -480,7 +495,7 @@ self => def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that) class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext && that.hasNext def next = (self.next, that.next) def remaining = self.remaining min that.remaining @@ -497,7 +512,7 @@ self => class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S) extends IterableSplitter[(U, S)] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext || that.hasNext def next = if (self.hasNext) { if (that.hasNext) (self.next, that.next) @@ -534,6 +549,18 @@ self => def split: Seq[SeqSplitter[T]] def psplit(sizes: Int*): Seq[SeqSplitter[T]] + override def splitWithSignalling: Seq[SeqSplitter[T]] = { + val pits = split + pits foreach { _.signalDelegate = signalDelegate } + pits + } + + def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = { + val pits = psplit(sizes: _*) + pits foreach { _.signalDelegate = signalDelegate } + pits + } + /** The number of elements this iterator has yet to traverse. This method * doesn't change the state of the iterator. 
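 *
 * A usage sketch of the signalling-aware variants defined above (names
 * illustrative, not from this source):
 * {{{
 * val parts = splitter.psplitWithSignalling(n, length - n)
 * // every element of `parts` now shares splitter.signalDelegate
 * }}}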
Unlike the version of this method in the supertrait, * method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number @@ -626,13 +653,13 @@ self => def reverse: SeqSplitter[T] = { val pa = mutable.ParArray.fromTraversables(self).reverse - new pa.ParArrayIterator with pa.SCPI { + new pa.ParArrayIterator { override def reverse = self } } class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate private[this] val trio = { val pits = self.psplit(from, replaced, self.remaining - from - replaced) (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2) diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index e785932933..7adf51cffb 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -52,7 +52,7 @@ self => protected[this] override def newCombiner = HashMapCombiner[K, V] - def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI + def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) override def seq = trie @@ -69,11 +69,8 @@ self => case None => newc } - type SCPI = SignalContextPassingIterator[ParHashMapIterator] - class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) - extends super.ParIterator { - self: SignalContextPassingIterator[ParHashMapIterator] => + extends IterableSplitter[(K, V)] { var i = 0 def dup = triter match { case t: TrieIterator[_] => @@ -84,24 +81,24 @@ self => dupFromIterator(buff.iterator) } private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { - val phit = new ParHashMapIterator(it, sz) with SCPI + val phit = new ParHashMapIterator(it, sz) phit.i = i phit } - def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match { + def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match { case t: TrieIterator[_] => val previousRemaining = remaining val ((fst, fstlength), snd) = t.split val sndlength = previousRemaining - fstlength Seq( - new ParHashMapIterator(fst, fstlength) with SCPI, - new ParHashMapIterator(snd, sndlength) with SCPI + new ParHashMapIterator(fst, fstlength), + new ParHashMapIterator(snd, sndlength) ) case _ => // iterator of the collision map case val buff = triter.toBuffer val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) with SCPI } + Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } } def next(): (K, V) = { i += 1 diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 8332167b90..1cf0ccd391 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -49,7 +49,7 @@ self => override def empty: ParHashSet[T] = new ParHashSet[T] - def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI + def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) override def seq = trie @@ -66,11 +66,8 @@ self => case None => newc } - type SCPI = SignalContextPassingIterator[ParHashSetIterator] - class ParHashSetIterator(var triter: 
Iterator[T], val sz: Int) - extends super.ParIterator { - self: SignalContextPassingIterator[ParHashSetIterator] => + extends IterableSplitter[T] { var i = 0 def dup = triter match { case t: TrieIterator[_] => @@ -81,24 +78,24 @@ self => dupFromIterator(buff.iterator) } private def dupFromIterator(it: Iterator[T]) = { - val phit = new ParHashSetIterator(it, sz) with SCPI + val phit = new ParHashSetIterator(it, sz) phit.i = i phit } - def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match { + def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { case t: TrieIterator[_] => val previousRemaining = remaining val ((fst, fstlength), snd) = t.split val sndlength = previousRemaining - fstlength Seq( - new ParHashSetIterator(fst, fstlength) with SCPI, - new ParHashSetIterator(snd, sndlength) with SCPI + new ParHashSetIterator(fst, fstlength), + new ParHashSetIterator(snd, sndlength) ) case _ => // iterator of the collision map case val buff = triter.toBuffer val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) with SCPI } + Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } } def next(): T = { i += 1 @@ -111,6 +108,7 @@ self => } } + /** $factoryInfo * @define Coll immutable.ParHashSet * @define coll immutable parallel hash set @@ -124,6 +122,7 @@ object ParHashSet extends ParSetFactory[ParHashSet] { def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) } + private[immutable] abstract class HashSetCombiner[T] extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { //self: EnvironmentPassingCombiner[T, ParHashSet[T]] => @@ -207,6 +206,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine } } + object HashSetCombiner { def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index 350e64739f..64e07ce4ff 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -10,6 +10,7 @@ package scala.collection.parallel.immutable import scala.collection.immutable.Range import scala.collection.parallel.Combiner +import scala.collection.parallel.SeqSplitter import scala.collection.generic.CanCombineFrom import scala.collection.parallel.IterableSplitter import scala.collection.Iterator @@ -41,13 +42,10 @@ self => @inline final def apply(idx: Int) = range.apply(idx); - def splitter = new ParRangeIterator with SCPI - - type SCPI = SignalContextPassingIterator[ParRangeIterator] + def splitter = new ParRangeIterator class ParRangeIterator(range: Range = self.range) - extends ParIterator { - me: SignalContextPassingIterator[ParRangeIterator] => + extends SeqSplitter[Int] { override def toString = "ParRangeIterator(over: " + range + ")" private var ind = 0 private val len = range.length @@ -64,15 +62,15 @@ self => private def rangeleft = range.drop(ind) - def dup = new ParRangeIterator(rangeleft) with SCPI + def dup = new ParRangeIterator(rangeleft) def split = { val rleft = rangeleft val elemleft = rleft.length - if (elemleft < 2) Seq(new ParRangeIterator(rleft) with SCPI) + if (elemleft < 2) Seq(new ParRangeIterator(rleft)) else Seq( - new ParRangeIterator(rleft.take(elemleft / 2)) with SCPI, - new 
ParRangeIterator(rleft.drop(elemleft / 2)) with SCPI + new ParRangeIterator(rleft.take(elemleft / 2)), + new ParRangeIterator(rleft.drop(elemleft / 2)) ) } @@ -81,7 +79,7 @@ self => for (sz <- sizes) yield { val fronttaken = rleft.take(sz) rleft = rleft.drop(sz) - new ParRangeIterator(fronttaken) with SCPI + new ParRangeIterator(fronttaken) } } diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala index fdeaefc3ff..5d9c431bc1 100644 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ b/src/library/scala/collection/parallel/immutable/ParVector.scala @@ -48,22 +48,19 @@ extends ParSeq[T] def this() = this(Vector()) - type SCPI = SignalContextPassingIterator[ParVectorIterator] - def apply(idx: Int) = vector.apply(idx) def length = vector.length def splitter: SeqSplitter[T] = { - val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI + val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) vector.initIterator(pit) pit } override def seq: Vector[T] = vector - class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator { - self: SCPI => + class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { def remaining: Int = remainingElementCount def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter def split: Seq[ParVectorIterator] = { diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala index 7b1e39d092..63635537d7 100644 --- a/src/library/scala/collection/parallel/immutable/package.scala +++ b/src/library/scala/collection/parallel/immutable/package.scala @@ -22,23 +22,19 @@ package immutable { override def seq = throw new UnsupportedOperationException def update(idx: Int, elem: T) = throw new UnsupportedOperationException - type SCPI = SignalContextPassingIterator[ParIterator] - - class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator { - me: SignalContextPassingIterator[ParIterator] => - + class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { def remaining = until - i def hasNext = i < until def next = { i += 1; elem } - def dup = new ParIterator(i, until, elem) with SCPI + def dup = new ParIterator(i, until, elem) def psplit(sizes: Int*) = { val incr = sizes.scanLeft(0)(_ + _) - for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) with SCPI + for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) } def split = psplit(remaining / 2, remaining - remaining / 2) } - def splitter = new ParIterator with SCPI + def splitter = new ParIterator } } diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index a1eb3beb0c..72a8184b10 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -19,6 +19,7 @@ import scala.collection.generic.CanBuildFrom import scala.collection.generic.ParFactory import scala.collection.generic.Sizing import scala.collection.parallel.Combiner +import scala.collection.parallel.SeqSplitter import scala.collection.parallel.ParSeqLike import scala.collection.parallel.CHECK_RATE 
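// The ParArrayIterator below tracks a half-open interval [i, until) over the
// underlying array, so `dup`, `split` and `psplit` reduce to cheap index
// arithmetic with no copying of elements.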
import scala.collection.mutable.ArraySeq @@ -74,17 +75,13 @@ self => override def seq = arrayseq - type SCPI = SignalContextPassingIterator[ParArrayIterator] - protected[parallel] def splitter: ParArrayIterator = { - val pit = new ParArrayIterator with SCPI + val pit = new ParArrayIterator pit } class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) - extends super.ParIterator { - me: SignalContextPassingIterator[ParArrayIterator] => - + extends SeqSplitter[T] { def hasNext = i < until def next = { @@ -95,9 +92,9 @@ self => def remaining = until - i - def dup = new ParArrayIterator(i, until, arr) with SCPI + def dup = new ParArrayIterator(i, until, arr) - def psplit(sizesIncomplete: Int*): Seq[ParIterator] = { + def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { var traversed = i val total = sizesIncomplete.reduceLeft(_ + _) val left = remaining @@ -106,19 +103,19 @@ self => val start = traversed val end = (traversed + sz) min until traversed = end - new ParArrayIterator(start, end, arr) with SCPI + new ParArrayIterator(start, end, arr) } else { - new ParArrayIterator(traversed, traversed, arr) with SCPI + new ParArrayIterator(traversed, traversed, arr) } } - override def split: Seq[ParIterator] = { + override def split: Seq[ParArrayIterator] = { val left = remaining if (left >= 2) { val splitpoint = left / 2 val sq = Seq( - new ParArrayIterator(i, i + splitpoint, arr) with SCPI, - new ParArrayIterator(i + splitpoint, until, arr) with SCPI) + new ParArrayIterator(i, i + splitpoint, arr), + new ParArrayIterator(i + splitpoint, until, arr)) i = until sq } else { diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala new file mode 100644 index 0000000000..86624500fd --- /dev/null +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -0,0 +1,147 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.parallel.mutable + + + +import scala.collection.generic._ +import scala.collection.parallel.Combiner +import scala.collection.parallel.IterableSplitter +import scala.collection.mutable.Ctrie +import scala.collection.mutable.CtrieIterator + + + +/** Parallel Ctrie collection. + * + * It has its bulk operations parallelized, but uses the snapshot operation + * to create the splitter. This means that parallel bulk operations can be + * called concurrently with the modifications. 
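 *
 * For example (a usage sketch, not from this source):
 * {{{
 * val pc = new ParCtrie[Int, Int]
 * for (i <- 0 until 1000) pc.put(i, i)
 * // another thread may keep calling pc.put(...) concurrently; the
 * // parallel count below still runs over a consistent snapshot
 * val evens = pc.count(kv => kv._2 % 2 == 0)
 * }}}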
+ * + * @author Aleksandar Prokopec + * @since 2.10 + */ +final class ParCtrie[K, V] private[collection] (private val ctrie: Ctrie[K, V]) +extends ParMap[K, V] + with GenericParMapTemplate[K, V, ParCtrie] + with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]] + with ParCtrieCombiner[K, V] + with Serializable +{ + + def this() = this(new Ctrie) + + override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie + + override def empty: ParCtrie[K, V] = ParCtrie.empty + + protected[this] override def newCombiner = ParCtrie.newCombiner + + override def seq = ctrie + + def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true) + + override def size = ctrie.size + + override def clear() = ctrie.clear() + + def result = this + + def get(key: K): Option[V] = ctrie.get(key) + + def put(key: K, value: V): Option[V] = ctrie.put(key, value) + + def update(key: K, value: V): Unit = ctrie.update(key, value) + + def remove(key: K): Option[V] = ctrie.remove(key) + + def +=(kv: (K, V)): this.type = { + ctrie.+=(kv) + this + } + + def -=(key: K): this.type = { + ctrie.-=(key) + this + } + + override def stringPrefix = "ParCtrie" + +} + + +private[collection] class ParCtrieSplitter[K, V](lev: Int, ct: Ctrie[K, V], mustInit: Boolean) +extends CtrieIterator[K, V](lev, ct, mustInit) + with IterableSplitter[(K, V)] +{ + // only evaluated if `remaining` is invoked (which is not used by most tasks) + //lazy val totalsize = ct.iterator.size /* TODO improve to lazily compute sizes */ + def totalsize: Int = throw new UnsupportedOperationException + var iterated = 0 + + protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit) + + override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = { + val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) + level < maxsplits + } + + def dup = null // TODO necessary for views + + override def next() = { + iterated += 1 + super.next() + } + + def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] + + override def isRemainingCheap = false + + def remaining: Int = totalsize - iterated +} + + +/** Only used within the `ParCtrie`. 
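 * The ParCtrie serves as its own combiner: it is a concurrent, thread-safe
 * map, so `canBeShared` is `true` and `combine` is only ever legal when
 * invoked with the combiner itself (any other argument is an error).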
*/ +private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] { + + def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else { + throw new UnsupportedOperationException("This shouldn't have been called in the first place.") + + val thiz = this.asInstanceOf[ParCtrie[K, V]] + val that = other.asInstanceOf[ParCtrie[K, V]] + val result = new ParCtrie[K, V] + + result ++= thiz.iterator + result ++= that.iterator + + result + } + + override def canBeShared = true + +} + + +object ParCtrie extends ParMapFactory[ParCtrie] { + + def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V] + + def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V] + + implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V] + +} + + + + + + + + diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 31750b0b0d..15ffd3fdd2 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -12,7 +12,6 @@ package mutable - import collection.generic._ import collection.mutable.DefaultEntry import collection.mutable.HashEntry @@ -56,7 +55,7 @@ self => override def seq = new collection.mutable.HashMap[K, V](hashTableContents) - def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI + def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) override def size = tableSize @@ -93,14 +92,11 @@ self => override def stringPrefix = "ParHashMap" - type SCPI = SignalContextPassingIterator[ParHashMapIterator] - class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) - extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) with ParIterator { - me: SCPI => + extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value); def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = - new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI + new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) } private def writeObject(out: java.io.ObjectOutputStream) { diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 7763cdf318..6c5f513ad0 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -66,14 +66,11 @@ extends ParSet[T] def contains(elem: T) = containsEntry(elem) - def splitter = new ParHashSetIterator(0, table.length, size) with SCPI - - type SCPI = SignalContextPassingIterator[ParHashSetIterator] + def splitter = new ParHashSetIterator(0, table.length, size) class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) - extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) with ParIterator { - me: SCPI => - def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI + extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { + def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) } private def 
writeObject(s: java.io.ObjectOutputStream) {
diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
index 9b8e233b95..8c93732427 100644
--- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala
+++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala
@@ -29,7 +29,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m
 /** A parallel iterator returning all the entries.
 */
 abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]]
- (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
+ (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry)
 extends IterableSplitter[T] with SizeMapUtils {
 private val itertable = table
 private var traversed = 0
diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala
index f152629c50..8f19d0ecdb 100644
--- a/src/library/scala/collection/parallel/package.scala
+++ b/src/library/scala/collection/parallel/package.scala
@@ -83,6 +83,7 @@ package object parallel {
 }
 }
+
 package parallel {
 trait FactoryOps[From, Elem, To] {
 trait Otherwise[R] {
@@ -113,7 +114,19 @@ package parallel {
 }
 /* classes */
-
+
+ trait CombinerFactory[U, Repr] {
+ /** Provides a combiner used to construct a collection. */
+ def apply(): Combiner[U, Repr]
+ /** The call to the `apply` method can create a new combiner each time.
+ * If it does, this method returns `false`.
+ * The same combiner may be returned each time (typically, this is
+ * the case for concurrent collections, which are thread safe).
+ * If so, the method returns `true`.
+ */
+ def doesShareCombiners: Boolean
+ }
+
 /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */
 final case class CompositeThrowable(
 val throwables: Set[Throwable]
@@ -127,8 +140,9 @@
 * Automatically forwards the signal delegate when splitting.
 */
 private[parallel] class BufferSplitter[T]
- (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling)
+ (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling)
 extends IterableSplitter[T] {
+ signalDelegate = _sigdel
 def hasNext = index < until
 def next = {
 val r = buffer(index)
@@ -182,22 +196,23 @@
 * the receiver (which will be the return value).
*/ private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] - (private val bucketnumber: Int) + (private val bucketnumber: Int) extends Combiner[Elem, To] { //self: EnvironmentPassingCombiner[Elem, To] => protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber) protected var sz: Int = 0 - + def size = sz - + def clear() = { buckets = new Array[UnrolledBuffer[Buck]](bucketnumber) sz = 0 } - + def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} + def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - + def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = { if (this eq other) this else other match { diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala index 52705d302c..f28264c7a2 100644 --- a/src/library/scala/reflect/Code.scala +++ b/src/library/scala/reflect/Code.scala @@ -11,12 +11,14 @@ package scala.reflect /** This type is required by the compiler and <b>should not be used in client code</b>. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10") class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) { val manifest = implicitly[Manifest[T]] override def toString = "Code(tree = "+tree+", manifest = "+manifest+")" } /** This type is required by the compiler and <b>should not be used in client code</b>. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10") object Code { def lift[A](tree: A): Code[A] = throw new Error("Code was not lifted by compiler") diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 8bd45c0e33..6c02878b19 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -222,7 +222,7 @@ object Manifest { val clazz = classToSymbol(erasure) val pre = prefix match { case Some(pm) => pm.tpe - case None => clazz.owner.thisType + case None => clazz.owner.thisPrefix } namedType(pre, clazz, typeArguments map (_.tpe)) } diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala index 136f52b05f..448dca752c 100644 --- a/src/library/scala/reflect/api/Mirror.scala +++ b/src/library/scala/reflect/api/Mirror.scala @@ -3,57 +3,59 @@ package api /** A mirror establishes connections of * runtime entities such as class names and object instances - * with a refexive universe. + * with a reflexive universe. */ trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil { /** The Scala class symbol that has given fully qualified name * @param name The fully qualified name of the class to be returned - * @throws java.lang.ClassNotFoundException if no class wiht that name exists + * @throws java.lang.ClassNotFoundException if no class with that name exists * to do: throws anything else? */ - def classWithName(name: String): Symbol + def symbolForName(name: String): Symbol - /** Return a reference to the companion object of this class symbol + /** Return a reference to the companion object of the given class symbol. 
*/ - def getCompanionObject(clazz: Symbol): AnyRef + def companionInstance(clazz: Symbol): AnyRef - /** The Scala class symbol corresponding to the runtime class of given object - * @param The object from which the class is returned + /** The Scala class symbol corresponding to the runtime class of the given instance. + * @param instance The instance + * @return The class Symbol for the instance * @throws ? */ - def getClass(obj: AnyRef): Symbol + def symbolOfInstance(instance: Any): Symbol - /** The Scala type corresponding to the runtime type of given object. + /** The Scala type corresponding to the runtime type of given instance. * If the underlying class is parameterized, this will be an existential type, * with unknown type arguments. * - * @param The object from which the type is returned + * @param instance The instance. + * @return The Type of the given instance. * @throws ? */ - def getType(obj: AnyRef): Type + def typeOfInstance(instance: Any): Type /** The value of a field on a receiver instance. * @param receiver The receiver instance * @param field The field * @return The value contained in `receiver.field`. */ - def getValue(receiver: AnyRef, field: Symbol): Any + def getValueOfField(receiver: AnyRef, field: Symbol): Any /** Sets the value of a field on a receiver instance. * @param receiver The receiver instance * @param field The field * @param value The new value to be stored in the field. */ - def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit + def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit - /** Invokes a method on a reciver instance with some arguments + /** Invokes a method on a receiver instance with some arguments * @param receiver The receiver instance * @param meth The method * @param args The method call's arguments * @return The result of invoking `receiver.meth(args)` */ - def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any + def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any /** Maps a Java class to a Scala type reference * @param clazz The Java class object diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala index 8569b103cf..c0123ed955 100644 --- a/src/library/scala/reflect/api/Modifier.scala +++ b/src/library/scala/reflect/api/Modifier.scala @@ -1,11 +1,82 @@ package scala.reflect.api -object Modifier extends Enumeration { +import collection.{ immutable, mutable } - val `protected`, `private`, `override`, `abstract`, `final`, - `sealed`, `implicit`, `lazy`, `macro`, `case`, `trait`, - deferred, interface, mutable, parameter, covariant, contravariant, - preSuper, abstractOverride, local, java, static, caseAccessor, - defaultParameter, defaultInit, paramAccessor, bynameParameter = Value +sealed abstract class Modifier { + def name: String + def isKeyword: Boolean + def sourceString: String = if (isKeyword) "`" + name + "`" else name + override def equals(that: Any) = this eq that.asInstanceOf[AnyRef] + override def hashCode = name.hashCode + override def toString = name +} +final class SymbolModifier private (val name: String, val isKeyword: Boolean) extends Modifier { + def this(name: String) = this(name, false) +} +final class SourceModifier private (val name: String) extends Modifier { + def isKeyword = true +} + +object SymbolModifier { + private val seen = mutable.ListBuffer[SymbolModifier]() + private[api] def apply(name: String): SymbolModifier = { + val mod = name match { + case "case" | "trait" => new SymbolModifier(name, isKeyword = true) + 
case _ => new SymbolModifier(name)
+ }
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+object SourceModifier {
+ private val seen = mutable.ListBuffer[SourceModifier]()
+ private[api] def apply(name: String): SourceModifier = {
+ val mod = new SourceModifier(name)
+ seen += mod
+ mod
+ }
+ private[api] def all = seen.toList
+}
+
+object Modifier extends immutable.Set[Modifier] {
+ val `abstract` = SourceModifier("abstract")
+ val `final` = SourceModifier("final")
+ val `implicit` = SourceModifier("implicit")
+ val `lazy` = SourceModifier("lazy")
+ val `macro` = SourceModifier("macro")
+ val `override` = SourceModifier("override")
+ val `private` = SourceModifier("private")
+ val `protected` = SourceModifier("protected")
+ val `sealed` = SourceModifier("sealed")
+
+ val `case` = SymbolModifier("case")
+ val `trait` = SymbolModifier("trait")
+ val abstractOverride = SymbolModifier("abstractOverride")
+ val bynameParameter = SymbolModifier("bynameParameter")
+ val caseAccessor = SymbolModifier("caseAccessor")
+ val contravariant = SymbolModifier("contravariant")
+ val covariant = SymbolModifier("covariant")
+ val defaultInit = SymbolModifier("defaultInit")
+ val defaultParameter = SymbolModifier("defaultParameter")
+ val deferred = SymbolModifier("deferred")
+ val interface = SymbolModifier("interface")
+ val java = SymbolModifier("java")
+ val local = SymbolModifier("local")
+ val mutable = SymbolModifier("mutable")
+ val paramAccessor = SymbolModifier("paramAccessor")
+ val parameter = SymbolModifier("parameter")
+ val preSuper = SymbolModifier("preSuper")
+ val static = SymbolModifier("static")
+
+ val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet
+ val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet
+ val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers
+ def values = allModifiers
+
+ def contains(key: Modifier) = allModifiers(key)
+ def iterator = allModifiers.iterator
+ def -(elem: Modifier) = allModifiers - elem
+ def +(elem: Modifier) = allModifiers + elem
 }
diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index 9498f0af36..3a00f21c8c 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -11,7 +11,6 @@ package api
 * `name1 == name2` implies `name1 eq name2`.
 */
 trait Names {
-
 /** The abstract type of names */
 type Name >: Null <: AbsName
 
@@ -37,12 +36,20 @@ trait Names {
 /** Replaces all occurrences of $op_names in this name by corresponding operator symbols.
 * Example: `foo_$plus$eq` becomes `foo_+=`.
 */
- def decode: String
+ def decoded: String
 
 /** Replaces all occurrences of operator symbols in this name by corresponding $op_names.
 * Example: `foo_+=` becomes `foo_$plus$eq`
 */
- def encode: Name
+ def encoded: String
+
+ /** The decoded name, still represented as a name.
+ */
+ def decodedName: Name
+
+ /** The encoded name, still represented as a name.
+ */
+ def encodedName: Name
 }
 
 /** Create a new term name. */
 def newTermName(s: String): TermName
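
A note on the ParCtrie section at the top of this diff: splitters there iterate over a constant-time readOnlySnapshot, and because the combiner is the collection itself (canBeShared = true), ParCtrieCombiner.combine only ever legitimately hits the `this eq other` branch; the code after its throw is unreachable by design. A minimal usage sketch (object and value names are mine), assuming the patched Ctrie and ParCtrie classes are on the classpath:

    import scala.collection.mutable.Ctrie
    import scala.collection.parallel.mutable.ParCtrie

    object ParCtrieSketch extends App {
      val pm = new ParCtrie[Int, String]()   // parallel view over a fresh Ctrie
      for (i <- 1 to 1000) pm.put(i, i.toString)

      // Parallel bulk operations split the snapshot between workers; since the
      // snapshot is stable from the iterator's point of view, updates racing
      // with the traversal cannot corrupt it.
      val evens = pm filter { case (k, _) => k % 2 == 0 }
      println(evens.size)                    // 500

      // `seq` exposes the backing sequential, thread-safe Ctrie.
      val ct: Ctrie[Int, String] = pm.seq
      println(ct.get(42))                    // Some(42)
    }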
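
The Mirror renames above are more than cosmetic: invoke now takes its arguments in a second parameter list, so a receiver/method pair can be partially applied before the call site supplies arguments. A hypothetical helper against the renamed signatures; everything here other than the Mirror methods themselves (the object, dump, and its parameters) is invented for illustration:

    import scala.reflect.api.Mirror

    object MirrorSketch {
      // Dependent method types (on by default in 2.10) let `field` and
      // `getter` be Symbols belonging to the specific mirror `m`.
      def dump(m: Mirror)(receiver: AnyRef, field: m.Symbol, getter: m.Symbol) {
        // Field access, under the new unambiguous name.
        println(m.getValueOfField(receiver, field))

        // The curried shape of `invoke` allows staging: bind the receiver
        // and method first, supply the argument list later.
        val bound = m.invoke(receiver, getter) _
        println(bound(Nil))                  // call with no arguments
      }
    }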
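
Modifier above stops being an Enumeration and becomes a sealed hierarchy whose companion is itself an immutable.Set[Modifier], so ordinary set operations apply directly to the universe of modifiers. A short sketch, assuming the object compiles exactly as added above:

    import scala.reflect.api.Modifier

    object ModifierSketch extends App {
      println(Modifier.size)                              // 27: 9 source + 18 symbol modifiers
      println(Modifier contains Modifier.`lazy`)          // true

      // Keyword modifiers render with backquotes via sourceString.
      println(Modifier.`abstract`.sourceString)           // `abstract`
      println(Modifier.deferred.sourceString)             // deferred

      // The two families remain distinguishable.
      println(Modifier.sourceModifiers(Modifier.`final`)) // true
      println(Modifier.symbolModifiers(Modifier.static))  // true
    }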
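
The decoded/encoded split in Names.scala above removes the old asymmetry (decode: String but encode: Name) by giving both a String form and, via decodedName/encodedName, a Name form. The convention itself is a plain character-to-$op_name table; a self-contained toy version of that convention (the three-entry table is mine; the real mapping covers every operator character):

    // Toy version of the $op_name convention the Names doc describes.
    object NameCodecSketch extends App {
      private val ops = Map('+' -> "$plus", '=' -> "$eq", '-' -> "$minus")

      def encode(s: String): String =
        s flatMap (c => ops.getOrElse(c, c.toString))

      def decode(s: String): String =
        ops.foldLeft(s) { case (acc, (c, enc)) => acc.replace(enc, c.toString) }

      println(encode("foo_+="))          // foo_$plus$eq
      println(decode("foo_$plus$eq"))    // foo_+=
    }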
diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala index 3526cf259d..e737b0ea4f 100755 --- a/src/library/scala/reflect/api/StandardDefinitions.scala +++ b/src/library/scala/reflect/api/StandardDefinitions.scala @@ -11,14 +11,11 @@ trait StandardDefinitions { self: Universe => val definitions: AbsDefinitions abstract class AbsDefinitions { - // outer packages and their classes - def RootPackage: Symbol // under consideration + // packages + def RootPackage: Symbol def RootClass: Symbol def EmptyPackage: Symbol - def EmptyPackageClass: Symbol - def ScalaPackage: Symbol - def ScalaPackageClass: Symbol // top types def AnyClass : Symbol @@ -54,17 +51,19 @@ trait StandardDefinitions { self: Universe => // fundamental modules def PredefModule: Symbol - // fundamental type constructions - def ClassType(arg: Type): Type + /** Given a type T, returns the type corresponding to the VM's + * representation: ClassClass's type constructor applied to `arg`. + */ + def vmClassType(arg: Type): Type // !!! better name? /** The string representation used by the given type in the VM. */ - def signature(tp: Type): String + def vmSignature(sym: Symbol, info: Type): String /** Is symbol one of the value classes? */ - def isValueClass(sym: Symbol): Boolean + def isValueClass(sym: Symbol): Boolean // !!! better name? /** Is symbol one of the numeric value classes? */ - def isNumericValueClass(sym: Symbol): Boolean + def isNumericValueClass(sym: Symbol): Boolean // !!! better name? } } diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala new file mode 100644 index 0000000000..81517d2a6b --- /dev/null +++ b/src/library/scala/reflect/api/StandardNames.scala @@ -0,0 +1,21 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.reflect +package api + +trait StandardNames { self: Universe => + + val nme: AbsTermNames + + abstract class AbsTermNames { + val CONSTRUCTOR: TermName + } + + val tpnme: AbsTypeNames + + abstract class AbsTypeNames { + } +} diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala index 17d9b06324..15d754b5b4 100755 --- a/src/library/scala/reflect/api/Symbols.scala +++ b/src/library/scala/reflect/api/Symbols.scala @@ -9,11 +9,20 @@ trait Symbols { self: Universe => /** The modifiers of this symbol */ - def allModifiers: Set[Modifier.Value] + def modifiers: Set[Modifier] /** Does this symbol have given modifier? */ - def hasModifier(mod: Modifier.Value): Boolean + def hasModifier(mod: Modifier): Boolean + + /** A list of annotations attached to this Symbol. + */ + def annotations: List[self.AnnotationInfo] + + /** Whether this symbol carries an annotation for which the given + * symbol is its typeSymbol. + */ + def hasAnnotation(sym: Symbol): Boolean /** The owner of this symbol. This is the symbol * that directly contains the current symbol's definition. @@ -30,14 +39,6 @@ trait Symbols { self: Universe => */ def name: Name - /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`. - */ - def encodedName: String - - /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`. - */ - def decodedName: String - /** The encoded full path name of this symbol, where outer names and inner names * are separated by periods. 
*/ @@ -66,49 +67,43 @@ trait Symbols { self: Universe => * * The java access levels translate as follows: * - * java private: hasFlag(PRIVATE) && !hasAccessBoundary - * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package) - * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package) - * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary + * java private: hasFlag(PRIVATE) && (privateWithin == NoSymbol) + * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosingPackage) + * java protected: hasFlag(PROTECTED) && (privateWithin == enclosingPackage) + * java public: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == NoSymbol) */ def privateWithin: Symbol - /** Whether this symbol has a "privateWithin" visibility barrier attached. - */ - def hasAccessBoundary: Boolean - - /** A list of annotations attached to this Symbol. - */ - def getAnnotations: List[self.AnnotationInfo] - /** For a class: the module or case class factory with the same name in the same package. + * For a module: the class with the same name in the same package. * For all others: NoSymbol */ - def companionModule: Symbol - - /** For a module: the class with the same name in the same package. - * For all others: NoSymbol - */ - def companionClass: Symbol - - /** The module corresponding to this module class (note that this - * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass - */ - def sourceModule: Symbol + def companionSymbol: Symbol /** If symbol is an object definition, its implied associated class, * otherwise NoSymbol */ def moduleClass: Symbol // needed for LiftCode - /** The top-level class containing this symbol. */ - def toplevelClass: Symbol + /** If this symbol is a top-level class, this symbol; otherwise the next enclosing + * top-level class, or `NoSymbol` if none exists. + */ + def enclosingTopLevelClass: Symbol - /** The next enclosing class, or `NoSymbol` if none exists */ - def enclClass : Symbol + /** If this symbol is a class, this symbol; otherwise the next enclosing + * class, or `NoSymbol` if none exists. + */ + def enclosingClass: Symbol - /** The next enclosing method, or `NoSymbol` if none exists */ - def enclMethod : Symbol + /** If this symbol is a method, this symbol; otherwise the next enclosing + * method, or `NoSymbol` if none exists. + */ + def enclosingMethod: Symbol + + /** If this symbol is a package class, this symbol; otherwise the next enclosing + * package class, or `NoSymbol` if none exists. + */ + def enclosingPackageClass: Symbol /** Does this symbol represent the definition of term? * Note that every symbol is either a term or a type. @@ -141,13 +136,13 @@ trait Symbols { self: Universe => /** The type signature of this symbol. * Note if the symbol is a member of a class, one almost always is interested - * in `typeSigIn` with a site type instead. + * in `typeSignatureIn` with a site type instead. */ - def typeSig: Type + def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name. /** The type signature of this symbol seen as a member of given type `site`. */ - def typeSigIn(site: Type): Type + def typeSignatureIn(site: Type): Type /** A type reference that refers to this type symbol * Note if symbol is a member of a class, one almost always is interested @@ -156,11 +151,11 @@ trait Symbols { self: Universe => * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol * `C`. 
Then `C.asType` is the type `C[T]`.
 *
- * By contrast, `C.typeSig` would be a type signature of form
+ * By contrast, `C.typeSignature` would be a type signature of form
 * `PolyType(ClassInfoType(...))` that describes type parameters, value
 * parameters, parent types, and members of `C`.
 */
- def asType: Type
+ def asType: Type // !!! Same as typeSignature.
 
 /** A type reference that refers to this type symbol seen
 * as a member of given type `site`.
@@ -172,37 +167,37 @@
 * are part of results of `asType`, but not of `asTypeConstructor`.
 *
 * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol
- * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeCponstructor` is `C`.
+ * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
 */
 def asTypeConstructor: Type // needed by LiftCode
+
+ /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
+ */
+ def thisPrefix: Type
 
 /** If this symbol is a class or trait, its self type, otherwise the type
 * of the symbol itself.
 */
- def typeOfThis: Type
-
- /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
- */
- def thisType: Type
+ def selfType: Type
 
 /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
 * the current symbol as its owner.
 */
 def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
-
+
 /** Low-level operation to set the symbol's flags
 * @return the symbol itself
 */
- def setInternalFlags(flags: Long): this.type // needed by LiftCode
+ def setInternalFlags(flags: Long): this.type // needed by LiftCode !!! not enough reason to have in the api
 
 /** Set symbol's type signature to given type
 * @return the symbol itself
 */
- def setTypeSig(tpe: Type): this.type // needed by LiftCode
+ def setTypeSignature(tpe: Type): this.type // needed by LiftCode !!! not enough reason to have in the api
 
 /** Set symbol's annotations to given annotations `annots`.
 */
- def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode
+ def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode !!! not enough reason to have in the api
 }
 
 val NoSymbol: Symbol
diff --git a/src/library/scala/reflect/api/TreeBuildUtil.scala b/src/library/scala/reflect/api/TreeBuildUtil.scala
index b437824925..f28008bc21 100644
--- a/src/library/scala/reflect/api/TreeBuildUtil.scala
+++ b/src/library/scala/reflect/api/TreeBuildUtil.scala
@@ -3,19 +3,19 @@ package scala.reflect.api
 trait TreeBuildUtil extends Universe {
 
 /** The symbol corresponding to the globally accessible class with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
 */
- def staticClass(fullname: String): Symbol
+ def staticClass(fullName: String): Symbol
 
 /** The symbol corresponding to the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
 */
- def staticModule(fullname: String): Symbol
+ def staticModule(fullName: String): Symbol
 
 /** The this-type of the globally accessible object with the
- * given fully qualified name `fullname`.
+ * given fully qualified name `fullName`.
 */
- def thisModuleType(fullname: String): Type
+ def thisModuleType(fullName: String): Type
 
 /** Selects type symbol with given simple name `name` from the defined members of `owner`.
*/
@@ -38,7 +38,7 @@ trait TreeBuildUtil extends Universe {
 * @param tsig the type signature of the free variable
 * @param value the value of the free variable at runtime
 */
- def freeVar(name: String, tsig: Type, value: Any): Symbol
+ def newFreeVar(name: String, info: Type, value: Any): Symbol
 
 /** Create a Modifiers structure given internal flags, qualifier, annotations */
 def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers
diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala
index 88ef450ed9..19bfd09b81 100644
--- a/src/library/scala/reflect/api/TreePrinters.scala
+++ b/src/library/scala/reflect/api/TreePrinters.scala
@@ -31,18 +31,6 @@ trait TreePrinters { self: Universe =>
 // emits more or less verbatim representation of the provided tree
 // todo. when LiftCode becomes a macro, throw this code away and use that macro
 class RawTreePrinter(out: PrintWriter) extends TreePrinter {
- import scala.reflect.api.Modifier
- import scala.reflect.api.Modifier._
-
- def copypasteModifier(mod: Modifier.Value): String = mod match {
- case mod @ (
- `protected` | `private` | `override` |
- `abstract` | `final` | `sealed` |
- `implicit` | `lazy` | `macro` |
- `case` | `trait`) => "`" + mod.toString + "`"
- case mod => mod.toString
- }
-
 def print(args: Any*): Unit = args foreach {
 case EmptyTree =>
 print("EmptyTree")
@@ -77,14 +65,14 @@ trait TreePrinters { self: Universe =>
 print(")")
 case mods: Modifiers =>
 val parts = collection.mutable.ListBuffer[String]()
- parts += "Set(" + mods.allModifiers.map{copypasteModifier}.mkString(", ") + ")"
+ parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")"
 parts += "newTypeName(\"" + mods.privateWithin.toString + "\")"
 parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")"
 
 var keep = 3
 if (keep == 3 && mods.annotations.isEmpty) keep -= 1
 if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1
- if (keep == 1 && mods.allModifiers.isEmpty) keep -= 1
+ if (keep == 1 && mods.modifiers.isEmpty) keep -= 1
 
 print("Modifiers(", parts.take(keep).mkString(", "), ")")
 case name: Name =>
diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala
index 0a38fb45bf..32940cbcd6 100644
--- a/src/library/scala/reflect/api/Trees.scala
+++ b/src/library/scala/reflect/api/Trees.scala
@@ -16,14 +16,14 @@ trait Trees { self: Universe =>
 type Modifiers <: AbsModifiers
 
 abstract class AbsModifiers {
- def hasModifier(mod: Modifier.Value): Boolean
- def allModifiers: Set[Modifier.Value]
+ def modifiers: Set[Modifier]
+ def hasModifier(mod: Modifier): Boolean
 def privateWithin: Name // default: EmptyTypeName
 def annotations: List[Tree] // default: List()
 def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers
 }
 
- def Modifiers(mods: Set[Modifier.Value] = Set(),
+ def Modifiers(mods: Set[Modifier] = Set(),
 privateWithin: Name = EmptyTypeName,
 annotations: List[Tree] = List()): Modifiers
 
@@ -476,6 +476,17 @@ trait Trees { self: Universe =>
 */
 case class New(tpt: Tree) extends TermTree
 
+ /** Factory method for object creation `new tpt(args_1)...(args_n)`
+ * A `New(t, as)` is expanded to: `(new t).<init>(as)`
+ */
+ def New(tpt: Tree, argss: List[List[Tree]]): Tree = {
+ assert(!argss.isEmpty)
+ // todo.
we need to expose names in scala.reflect.api
+// val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+ val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+ (superRef /: argss) (Apply)
+ }
+
 /** Type annotation, eliminated by explicit outer */
 case class Typed(expr: Tree, tpt: Tree) extends TermTree
@@ -632,10 +643,10 @@ trait Trees { self: Universe =>
 }
 
 def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
-
+
 /** An empty deferred value definition corresponding to:
 * val _: _
- * This is used as a placeholder in the `self` parameter Template if there is
+ * This is used as a placeholder in the `self` parameter Template if there is
 * no definition of a self value of self type.
 */
 def emptyValDef: ValDef
@@ -1129,9 +1140,9 @@ trait Trees { self: Universe =>
 abstract class Transformer {
 val treeCopy: TreeCopier = newLazyTreeCopier
 protected var currentOwner: Symbol = definitions.RootClass
- protected def currentMethod = currentOwner.enclMethod
- protected def currentClass = currentOwner.enclClass
- protected def currentPackage = currentOwner.toplevelClass.owner
+ protected def currentMethod = currentOwner.enclosingMethod
+ protected def currentClass = currentOwner.enclosingClass
+ protected def currentPackage = currentOwner.enclosingTopLevelClass.owner
 
 def transform(tree: Tree): Tree = tree match {
 case EmptyTree =>
 tree
diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala
index 6185a788ae..8a91956320 100755
--- a/src/library/scala/reflect/api/Types.scala
+++ b/src/library/scala/reflect/api/Types.scala
@@ -6,7 +6,6 @@ trait Types { self: Universe =>
 /** This class declares operations that are visible in a Type.
 */
 abstract class AbsType {
-
 /** The type symbol associated with the type, or `NoSymbol` for types
 * that do not refer to a type symbol.
 */
@@ -47,7 +46,7 @@ trait Types { self: Universe =>
 /** Substitute types in `to` for corresponding occurrences of references to
 * symbols `from` in this type.
 */
- def subst(from: List[Symbol], to: List[Type]): Type
+ def substituteTypes(from: List[Symbol], to: List[Type]): Type // !!! Too many things with names like "subst"
 
 /** If this is a parameterized type, the type arguments.
 * Otherwise the empty list
 */
 
 /** Is this type a type constructor that is missing its type arguments?
 */
- def isHigherKinded: Boolean
+ def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no?
 
 /**
 * Expands type aliases and converts higher-kinded TypeRefs to PolyTypes.
 *
 * Example:
 *
 * TypeRef(pre, <List>, List()) is replaced by
 * PolyType(X, TypeRef(pre, <List>, List(X)))
 */
- def normalize: Type
+ def normalize: Type // !!! Alternative name? "normalize" is used to mean too many things.
 
 /** Does this type conform to given type argument `that`? */
 def <:< (that: Type): Boolean
@@ -74,11 +73,11 @@ trait Types { self: Universe =>
 /** Is this type equivalent to given type argument `that`? */
 def =:= (that: Type): Boolean
 
- /** The list of all baseclasses of this type (including its own typeSymbol)
+ /** The list of all base classes of this type (including its own typeSymbol)
 * in reverse linearization order, starting with the class itself and ending
 * in class Any.
 */
- def baseClasses: List[Symbol]
+ def baseClasses: List[Symbol] // !!! Alternative name, perhaps linearization?
 
 /** The least type instance of given class which is a supertype
 * of this type.
Example:
@@ -104,9 +103,9 @@
 def asSeenFrom(pre: Type, clazz: Symbol): Type
 
 /** The erased type corresponding to this type after
- * all transcformations from Scala to Java have been performed.
+ * all transformations from Scala to Java have been performed.
 */
- def erasedType: Type
+ def erasedType: Type // !!! "erasedType", compare with "widen" (so "erase") or "underlying" (so "erased")
 
 /** Apply `f` to each part of this type, returning
 * a new type. Children get mapped before their parents. */
@@ -138,7 +137,7 @@ trait Types { self: Universe =>
 /** If this is a singleton type, widen it to its nearest underlying non-singleton
 * base type by applying one or more `underlying` dereferences.
- * If this is not a singlecon type, returns this type itself.
+ * If this is not a singleton type, returns this type itself.
 *
 * Example:
 *
@@ -400,11 +399,6 @@ trait Types { self: Universe =>
 def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)]
 }
-
-
-
-
-
 abstract class NullaryMethodTypeExtractor {
 def apply(resultType: Type): NullaryMethodType
 def unapply(tpe: NullaryMethodType): Option[(Type)]
diff --git a/src/library/scala/reflect/api/Universe.scala b/src/library/scala/reflect/api/Universe.scala
index 03acbdda2c..a3cec3271b 100755
--- a/src/library/scala/reflect/api/Universe.scala
+++ b/src/library/scala/reflect/api/Universe.scala
@@ -10,7 +10,8 @@ abstract class Universe extends Symbols
 with Positions
 with TreePrinters
 with AnnotationInfos
- with StandardDefinitions {
+ with StandardDefinitions
+ with StandardNames {
 
 type Position
 val NoPosition: Position
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
index d0a2787fdf..ebbd4735e5 100644
--- a/src/library/scala/reflect/macro/Context.scala
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -12,4 +12,25 @@ trait Context extends api.Universe {
 */
 def referenceCapturedVariable(id: Ident): Tree
 
+ /** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+ * For instance, given the abstract syntax tree representation of the `x + 1` expression:
+ *
+ * Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+ *
+ * The reifier transforms it to the following tree:
+ *
+ * $mr.Apply($mr.Select($mr.Ident($mr.newFreeVar("x", <Int>, x)), "+"), List($mr.Literal($mr.Constant(1))))
+ *
+ * The transformation looks mostly straightforward, but it has its tricky parts:
+ * * The reifier retains symbols and types defined outside the reified tree, while
+ * locally defined entities get erased and replaced with their original trees
+ * * Free variables are detected and wrapped in symbols of the type FreeVar
+ * * Mutable variables that are accessed from a local function are wrapped in refs
+ * * Since reified trees can be compiled outside of the scope they've been created in,
+ * special measures are taken to ensure that all freeVars remain visible
+ *
+ * Typical usage of this function is to turn some of the trees received or created by a macro
+ * into a form that can be inspected (via pattern matching) or compiled/run (by a reflective ToolBox) at runtime.
+ */
+ def reify(tree: Tree): Tree
 }
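
The enclosing* renames in Symbols.scala above also sharpen the contract: each method now answers "this symbol, if it already is one, otherwise the nearest enclosing one". That contract is just a self-inclusive owner-chain walk; a toy model, with invented Sym/Cls/Meth types rather than the real Symbol:

    sealed trait Sym { def owner: Sym; def isClass: Boolean }
    case object NoSym extends Sym { def owner = NoSym; def isClass = false }
    case class Cls(name: String, owner: Sym) extends Sym { def isClass = true }
    case class Meth(name: String, owner: Sym) extends Sym { def isClass = false }

    object EnclosingSketch extends App {
      // enclosingClass: self-inclusive walk up the owner chain.
      def enclosingClass(s: Sym): Sym =
        if ((s eq NoSym) || s.isClass) s else enclosingClass(s.owner)

      val c = Cls("C", NoSym)
      val m = Meth("f", c)
      println(enclosingClass(m))   // Cls(C,NoSym)
      println(enclosingClass(c))   // Cls(C,NoSym), i.e. c itself
    }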
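
The New(tpt, argss) factory added to Trees.scala above builds `new tpt(args_1)...(args_n)` by left-folding the argument lists over the constructor reference: `(superRef /: argss)(Apply)` wraps one Apply node per argument list. The same fold modeled with strings instead of trees (all names invented) shows the shape:

    object NewFoldSketch extends App {
      // Stand-in for Apply(fun, args), over strings.
      def apply1(fun: String, args: List[String]) =
        fun + args.mkString("(", ", ", ")")

      val superRef = "new C.<init>"
      val argss    = List(List("1", "2"), List("3"))

      // (superRef /: argss)(apply1) is argss.foldLeft(superRef)(apply1).
      println((superRef /: argss)(apply1))   // new C.<init>(1, 2)(3)
    }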
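
substituteTypes in Types.scala above performs a positional, parallel substitution: every reference to from(i) is replaced by to(i). A toy version over an invented two-case type AST (Ref and App are stand-ins, not the reflection API) makes the semantics concrete:

    sealed trait Tpe
    case class Ref(name: String) extends Tpe                   // reference to a type symbol
    case class App(tycon: String, args: List[Tpe]) extends Tpe // applied type constructor

    object SubstSketch extends App {
      def subst(t: Tpe, from: List[String], to: List[Tpe]): Tpe = t match {
        case Ref(n) =>
          val i = from.indexOf(n)
          if (i >= 0) to(i) else t
        case App(c, args) =>
          App(c, args map (subst(_, from, to)))
      }

      // List[T] with T -> Int becomes List[Int].
      println(subst(App("List", List(Ref("T"))), List("T"), List(Ref("Int"))))
      // prints: App(List,List(Ref(Int)))
    }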
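
Finally, reify above is the macro-side hook: it turns a tree the macro received into a tree that, when executed at runtime, rebuilds the original. A hypothetical fragment of a macro implementation that already holds a Context; the surrounding macro plumbing is elided, and nothing beyond what the Context trait above declares is assumed:

    import scala.reflect.macro.Context

    object ReifySketch {
      // Given the context `c` and one of its trees, produce the tree that
      // reconstructs it at runtime, e.g. so the expansion can log or
      // pattern-match the argument's AST after compilation.
      def treeForRuntime(c: Context)(tree: c.Tree): c.Tree =
        c.reify(tree)
    }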