From 540ad0223ae26c0deae250c3ace2092904290a8b Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 17 Dec 2011 19:36:49 +0100 Subject: Use RedBlack.iterator to create iterators for TreeSet/TreeMap. This turns iterator creation from an O(n) operation into an O(log n) operation. Unfortunately, it halves actual iteration speed (consuming the iterator fully), probably due to the many by-name closures that are needed. --- src/library/scala/collection/immutable/TreeMap.scala | 2 +- src/library/scala/collection/immutable/TreeSet.scala | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index ef0eac3701..2fd5208991 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -153,7 +153,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va * * @return the new iterator */ - def iterator: Iterator[(A, B)] = tree.toStream.iterator + def iterator: Iterator[(A, B)] = tree.iterator override def toStream: Stream[(A, B)] = tree.toStream diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 8b90ece143..05f27d0d93 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -107,7 +107,7 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) * * @return the new iterator */ - def iterator: Iterator[A] = tree.toStream.iterator map (_._1) + def iterator: Iterator[A] = tree.iterator map (_._1) override def toStream: Stream[A] = tree.toStream map (_._1) -- cgit v1.2.3 From 88ed93063419f6d09026e0ae466fe530f69af551 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 17 Dec 2011 21:18:48 +0100 Subject: Use custom implementation for iterating over RedBlack trees. Raw performance is much better than '++' based iterator. 
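The TreeIterator added below keeps an explicit stack of the nodes whose key is still to be emitted, instead of composing sub-iterators with '++'. As a rough illustration of the technique only (a hypothetical, simplified binary tree, not the library's Tree type), a stack-based in-order iterator looks roughly like this:

    import scala.annotation.tailrec

    object InOrderIteratorSketch {
      sealed trait Node[+A]
      case object Leaf extends Node[Nothing]
      final case class Branch[+A](left: Node[A], key: A, right: Node[A]) extends Node[A]

      final class InOrderIterator[A](root: Node[A]) extends Iterator[A] {
        // Branches whose key (and right subtree) have not been emitted yet.
        private[this] var stack: List[Branch[A]] = Nil
        pushLeftSpine(root)

        @tailrec
        private[this] def pushLeftSpine(n: Node[A]): Unit = n match {
          case Leaf => ()
          case b: Branch[A] =>
            stack = b :: stack
            pushLeftSpine(b.left)
        }

        def hasNext: Boolean = stack.nonEmpty

        def next(): A = stack match {
          case Nil => throw new NoSuchElementException("next on empty iterator")
          case b :: rest =>
            stack = rest
            pushLeftSpine(b.right) // line up the in-order successor
            b.key
        }
      }

      def main(args: Array[String]): Unit = {
        val t = Branch(Branch(Leaf, 1, Leaf), 2, Branch(Leaf, 3, Leaf))
        println(new InOrderIterator(t).toList) // List(1, 2, 3)
      }
    }

Compared with the '++' version this allocates one stack entry per node on the current left spine rather than a closure per node, which is presumably where the win comes from.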
--- .../scala/collection/immutable/RedBlack.scala | 36 +++++++++++++++++++--- 1 file changed, 31 insertions(+), 5 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 9906c9896e..097df54af2 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -149,11 +149,9 @@ abstract class RedBlack[A] extends Serializable { def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest - def toStream: Stream[(A,B)] = - left.toStream ++ Stream((key,value)) ++ right.toStream + def toStream: Stream[(A,B)] = iterator.toStream - def iterator: Iterator[(A, B)] = - left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator + def iterator: Iterator[(A, B)] = new TreeIterator(this) def foreach[U](f: (A, B) => U) { left foreach f @@ -286,6 +284,34 @@ abstract class RedBlack[A] extends Serializable { override val right: Tree[B]) extends NonEmpty[B] { def isBlack = true } -} + private[this] class TreeIterator[B](tree: NonEmpty[B]) extends Iterator[(A, B)] { + import collection.mutable.Stack + + override def hasNext: Boolean = !next.isEmpty + override def next: (A, B) = next match { + case Empty => + throw new NoSuchElementException("next on empty iterator") + case tree: NonEmpty[B] => + val result = (tree.key, tree.value) + addLeftMostBranchToPath(tree.right) + next = if (path.isEmpty) Empty else path.pop() + result + } + + @annotation.tailrec + private[this] def addLeftMostBranchToPath(tree: Tree[B]) { + tree match { + case Empty => + case tree: NonEmpty[B] => + path.push(tree) + addLeftMostBranchToPath(tree.left) + } + } + + private[this] val path: Stack[NonEmpty[B]] = Stack.empty[NonEmpty[B]] + addLeftMostBranchToPath(tree) + private[this] var next: Tree[B] = path.pop() + } +} -- cgit v1.2.3 From edcec038ed11dde5ccda92463d705916c3b39a34 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 17 Dec 2011 22:04:47 +0100 Subject: Optimized implementations of head/headOption/last/lastOption for TreeMap/TreeSet. 
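The diff below answers head and last by walking a single spine of the tree (smallest and greatest), which is O(log n) on the balanced tree, instead of going through an iterator. A minimal sketch of the idea, using a hypothetical simplified tree rather than the real NonEmpty/Empty types:

    object HeadLastSketch {
      sealed trait Tree[+A]
      case object Leaf extends Tree[Nothing]
      final case class Branch[+A](left: Tree[A], key: A, right: Tree[A]) extends Tree[A]

      // head of the collection: keep descending left; last: keep descending right.
      @annotation.tailrec
      def smallest[A](t: Branch[A]): A = t.left match {
        case l: Branch[A] => smallest(l)
        case Leaf         => t.key
      }

      @annotation.tailrec
      def greatest[A](t: Branch[A]): A = t.right match {
        case r: Branch[A] => greatest(r)
        case Leaf         => t.key
      }

      // headOption/lastOption only add an emptiness check in front.
      def headOption[A](t: Tree[A]): Option[A] = t match {
        case b: Branch[A] => Some(smallest(b))
        case Leaf         => None
      }
    }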
--- src/library/scala/collection/immutable/RedBlack.scala | 3 +++ src/library/scala/collection/immutable/TreeMap.scala | 11 +++++++++++ src/library/scala/collection/immutable/TreeSet.scala | 5 +++++ 3 files changed, 19 insertions(+) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 097df54af2..2d3d839851 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -40,6 +40,7 @@ abstract class RedBlack[A] extends Serializable { def upd[B1 >: B](k: A, v: B1): Tree[B1] def del(k: A): Tree[B] def smallest: NonEmpty[B] + def greatest: NonEmpty[B] def rng(from: Option[A], until: Option[A]): Tree[B] def first : A def last : A @@ -148,6 +149,7 @@ abstract class RedBlack[A] extends Serializable { } def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest + def greatest: NonEmpty[B] = if (right.isEmpty) this else right.greatest def toStream: Stream[(A,B)] = iterator.toStream @@ -262,6 +264,7 @@ abstract class RedBlack[A] extends Serializable { def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty) def del(k: A): Tree[Nothing] = this def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") + def greatest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") def iterator: Iterator[(A, Nothing)] = Iterator.empty def toStream: Stream[(A,Nothing)] = Stream.empty diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 2fd5208991..be67a45b1e 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -71,6 +71,17 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va override def lastKey = t.last override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) + override def head = { + val smallest = t.smallest + (smallest.key, smallest.value) + } + override def headOption = if (t.isEmpty) None else Some(head) + override def last = { + val greatest = t.greatest + (greatest.key, greatest.value) + } + override def lastOption = if (t.isEmpty) None else Some(last) + /** A factory to create empty maps of the same type of keys. */ override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 05f27d0d93..85f4ae4b0e 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -53,6 +53,11 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) override def stringPrefix = "TreeSet" + override def head = t.smallest.key + override def headOption = if (t.isEmpty) None else Some(head) + override def last = t.greatest.key + override def lastOption = if (t.isEmpty) None else Some(last) + def isSmaller(x: A, y: A) = compare(x,y) < 0 def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) -- cgit v1.2.3 From 9cdede8f033f661cfa3840070089fadd1b17fede Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 17 Dec 2011 22:42:21 +0100 Subject: Optimized implementation of init/tail for TreeSet/TreeMap. 
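Because the first and last keys are now cheap to locate, tail and init below reduce to a single delete of firstKey respectively lastKey, again O(log n) on the balanced tree. Expected behaviour, as a small usage example:

    object InitTailExample extends App {
      import scala.collection.immutable.TreeSet

      val s = TreeSet(3, 1, 2)          // iterates in sorted order: 1, 2, 3
      assert(s.tail == TreeSet(2, 3))   // tail == delete(firstKey), i.e. drop 1
      assert(s.init == TreeSet(1, 2))   // init == delete(lastKey),  i.e. drop 3
      println("init/tail behave as expected")
    }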
--- src/library/scala/collection/immutable/TreeMap.scala | 3 +++ src/library/scala/collection/immutable/TreeSet.scala | 3 +++ 2 files changed, 6 insertions(+) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index be67a45b1e..0e160ca50e 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -82,6 +82,9 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va } override def lastOption = if (t.isEmpty) None else Some(last) + override def tail = new TreeMap(size - 1, tree.delete(firstKey)) + override def init = new TreeMap(size - 1, tree.delete(lastKey)) + /** A factory to create empty maps of the same type of keys. */ override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 85f4ae4b0e..b969ecc0e8 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -58,6 +58,9 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) override def last = t.greatest.key override def lastOption = if (t.isEmpty) None else Some(last) + override def tail = new TreeSet(size - 1, tree.delete(firstKey)) + override def init = new TreeSet(size - 1, tree.delete(lastKey)) + def isSmaller(x: A, y: A) = compare(x,y) < 0 def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) -- cgit v1.2.3 From b7e671446892c232afdfb5e36ceeab135ece649b Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 17 Dec 2011 19:17:52 +0100 Subject: RedBlack.scala: Change count from 'def' to 'val' in NonEmpty tree to ensure TreeSet/TreeMap 'range' operations are O(log n) instead of O(n). --- src/library/scala/collection/immutable/RedBlack.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 2d3d839851..534d476507 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -255,7 +255,7 @@ abstract class RedBlack[A] extends Serializable { } def first = if (left .isEmpty) key else left.first def last = if (right.isEmpty) key else right.last - def count = 1 + left.count + right.count + val count = 1 + left.count + right.count } case object Empty extends Tree[Nothing] { def isEmpty = true -- cgit v1.2.3 From 95cb7bc7e3017a0004a61749c7d121371c4fe31b Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sun, 18 Dec 2011 20:41:23 +0100 Subject: Implemented drop/take/slice/splitAt/dropRight/takeRight for TreeMap/TreeSet by splitting the underlying RedBlack tree. This makes the operation O(log n) instead of O(n) and allows more structural sharing. 
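The central piece below is nth, which uses the per-node subtree size (count, made a val in the previous commit) to locate the element at a given index in O(log n). take(n) then becomes until(nth(n).key), drop(n) becomes from(nth(n).key), and slice combines the two, so the results are built by the existing range machinery and share structure with the original tree. A sketch of the index lookup on a hypothetical simplified tree with cached sizes:

    object NthSketch {
      sealed trait Tree[+A] { def count: Int }
      case object Leaf extends Tree[Nothing] { def count = 0 }
      final case class Branch[+A](left: Tree[A], key: A, right: Tree[A]) extends Tree[A] {
        val count: Int = 1 + left.count + right.count // cached, not recomputed per query
      }

      // Element at 0-based index n in key order; only one child is visited per step.
      @annotation.tailrec
      def nth[A](t: Tree[A], n: Int): A = t match {
        case Leaf => throw new NoSuchElementException("nth on empty tree")
        case Branch(left, key, right) =>
          if (n < left.count) nth(left, n)
          else if (n > left.count) nth(right, n - left.count - 1)
          else key
      }

      def main(args: Array[String]): Unit = {
        val t = Branch(Branch(Leaf, 1, Leaf), 2, Branch(Leaf, 3, Leaf))
        println((0 until t.count).map(nth(t, _))) // Vector(1, 2, 3)
      }
    }

dropRight, takeRight and splitAt are then expressed in terms of take and drop, as the diff shows.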
--- .../scala/collection/immutable/RedBlack.scala | 7 +++++++ .../scala/collection/immutable/TreeMap.scala | 23 ++++++++++++++++++++++ .../scala/collection/immutable/TreeSet.scala | 23 ++++++++++++++++++++++ 3 files changed, 53 insertions(+) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 534d476507..5ce2a29dc2 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -45,6 +45,7 @@ abstract class RedBlack[A] extends Serializable { def first : A def last : A def count : Int + protected[immutable] def nth(n: Int): NonEmpty[B] } abstract class NonEmpty[+B] extends Tree[B] with Serializable { def isEmpty = false @@ -256,6 +257,11 @@ abstract class RedBlack[A] extends Serializable { def first = if (left .isEmpty) key else left.first def last = if (right.isEmpty) key else right.last val count = 1 + left.count + right.count + protected[immutable] def nth(n: Int) = { + if (n < left.count) left.nth(n) + else if (n > left.count) right.nth(n - left.count - 1) + else this + } } case object Empty extends Tree[Nothing] { def isEmpty = true @@ -274,6 +280,7 @@ abstract class RedBlack[A] extends Serializable { def first = throw new NoSuchElementException("empty map") def last = throw new NoSuchElementException("empty map") def count = 0 + protected[immutable] def nth(n: Int) = throw new NoSuchElementException("empty map") } case class RedTree[+B](override val key: A, override val value: B, diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 0e160ca50e..bc91bbe268 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -85,6 +85,29 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va override def tail = new TreeMap(size - 1, tree.delete(firstKey)) override def init = new TreeMap(size - 1, tree.delete(lastKey)) + override def drop(n: Int) = { + if (n <= 0) this + else if (n >= size) empty + else from(tree.nth(n).key) + } + + override def take(n: Int) = { + if (n <= 0) empty + else if (n >= size) this + else until(tree.nth(n).key) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else range(tree.nth(from).key, tree.nth(until).key) + } + + override def dropRight(n: Int) = take(size - n) + override def takeRight(n: Int) = drop(size - n) + override def splitAt(n: Int) = (take(n), drop(n)) + /** A factory to create empty maps of the same type of keys. 
*/ override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index b969ecc0e8..dfaffcd581 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -61,6 +61,29 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) override def tail = new TreeSet(size - 1, tree.delete(firstKey)) override def init = new TreeSet(size - 1, tree.delete(lastKey)) + override def drop(n: Int) = { + if (n <= 0) this + else if (n >= size) empty + else from(tree.nth(n).key) + } + + override def take(n: Int) = { + if (n <= 0) empty + else if (n >= size) this + else until(tree.nth(n).key) + } + + override def slice(from: Int, until: Int) = { + if (until <= from) empty + else if (from <= 0) take(until) + else if (until >= size) drop(from) + else range(tree.nth(from).key, tree.nth(until).key) + } + + override def dropRight(n: Int) = take(size - n) + override def takeRight(n: Int) = drop(size - n) + override def splitAt(n: Int) = (take(n), drop(n)) + def isSmaller(x: A, y: A) = compare(x,y) < 0 def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) -- cgit v1.2.3 From 8d678236d820619819e52f2497d1dd1df29f1184 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sun, 18 Dec 2011 22:49:39 +0100 Subject: Implemented takeWhile/dropWhile/span to use tree splitting. This changes the operation from O(n log n) to O(n) and allows for more structural sharing. --- src/library/scala/collection/immutable/TreeMap.scala | 13 +++++++++++++ src/library/scala/collection/immutable/TreeSet.scala | 13 +++++++++++++ 2 files changed, 26 insertions(+) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index bc91bbe268..da2aef1c22 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -108,6 +108,19 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va override def takeRight(n: Int) = drop(size - n) override def splitAt(n: Int) = (take(n), drop(n)) + private[this] def countWhile(p: ((A, B)) => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next)) result += 1 + result + } + override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) + override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p)) + override def span(p: ((A, B)) => Boolean) = { + val n = countWhile(p) + (take(n), drop(n)) + } + /** A factory to create empty maps of the same type of keys. 
*/ override def empty: TreeMap[A, B] = TreeMap.empty[A, B](ordering) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index dfaffcd581..7fb333959e 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -84,6 +84,19 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) override def takeRight(n: Int) = drop(size - n) override def splitAt(n: Int) = (take(n), drop(n)) + private[this] def countWhile(p: A => Boolean): Int = { + var result = 0 + val it = iterator + while (it.hasNext && p(it.next)) result += 1 + result + } + override def dropWhile(p: A => Boolean) = drop(countWhile(p)) + override def takeWhile(p: A => Boolean) = take(countWhile(p)) + override def span(p: A => Boolean) = { + val n = countWhile(p) + (take(n), drop(n)) + } + def isSmaller(x: A, y: A) = compare(x,y) < 0 def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) -- cgit v1.2.3 From 3f66061af59bd5fc985dfbcf60da6238eba32848 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 19 Dec 2011 22:48:48 +0100 Subject: Switched from isSmaller to ordering. --- src/library/scala/collection/immutable/RedBlack.scala | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 5ce2a29dc2..cd2a1e716d 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -18,7 +18,7 @@ package immutable @SerialVersionUID(8691885935445612921L) abstract class RedBlack[A] extends Serializable { - def isSmaller(x: A, y: A): Boolean + implicit val ordering: Ordering[A] private def blacken[B](t: Tree[B]): Tree[B] = t match { case RedTree(k, v, l, r) => BlackTree(k, v, l, r) @@ -54,8 +54,8 @@ abstract class RedBlack[A] extends Serializable { def left: Tree[B] def right: Tree[B] def lookup(k: A): Tree[B] = - if (isSmaller(k, key)) left.lookup(k) - else if (isSmaller(key, k)) right.lookup(k) + if (ordering.lt(k, key)) left.lookup(k) + else if (ordering.lt(key, k)) right.lookup(k) else this private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match { case RedTree(y, yv, RedTree(x, xv, a, b), c) => @@ -74,8 +74,8 @@ abstract class RedBlack[A] extends Serializable { mkTree(isBlack, x, xv, a, r) } def upd[B1 >: B](k: A, v: B1): Tree[B1] = { - if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) - else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) + if (ordering.lt(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) + else if (ordering.lt(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) else mkTree(isBlack, k, v, left, right) } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees @@ -143,8 +143,8 @@ abstract class RedBlack[A] extends Serializable { } // RedBlack is neither A : Ordering[A], nor A <% Ordered[A] k match { - case _ if isSmaller(k, key) => delLeft - case _ if isSmaller(key, k) => delRight + case _ if ordering.lt(k, key) => delLeft + case _ if ordering.lt(key, k) => delRight case _ => append(left, right) } } @@ -164,8 +164,8 @@ abstract class RedBlack[A] extends Serializable { override def rng(from: Option[A], until: Option[A]): Tree[B] = { if (from == None && until == None) return this - if (from != 
None && isSmaller(key, from.get)) return right.rng(from, until); - if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get))) + if (from != None && ordering.lt(key, from.get)) return right.rng(from, until); + if (until != None && (ordering.lt(until.get,key) || !ordering.lt(key,until.get))) return left.rng(from, until); val newLeft = left.rng(from, None) val newRight = right.rng(None, until) -- cgit v1.2.3 From 7ec9b0bd35220552c262ff328de1e2ea36252c32 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 19 Dec 2011 22:56:21 +0100 Subject: Moved from implicit ordering value to implicit parameter. --- .../scala/collection/immutable/RedBlack.scala | 32 ++++++++++------------ 1 file changed, 15 insertions(+), 17 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index cd2a1e716d..8235ee9fb5 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -18,8 +18,6 @@ package immutable @SerialVersionUID(8691885935445612921L) abstract class RedBlack[A] extends Serializable { - implicit val ordering: Ordering[A] - private def blacken[B](t: Tree[B]): Tree[B] = t match { case RedTree(k, v, l, r) => BlackTree(k, v, l, r) case t => t @@ -30,18 +28,18 @@ abstract class RedBlack[A] extends Serializable { abstract class Tree[+B] extends Serializable { def isEmpty: Boolean def isBlack: Boolean - def lookup(x: A): Tree[B] - def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v)) - def delete(k: A): Tree[B] = blacken(del(k)) - def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until)) + def lookup(x: A)(implicit ordering: Ordering[A]): Tree[B] + def update[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] = blacken(upd(k, v)) + def delete(k: A)(implicit ordering: Ordering[A]): Tree[B] = blacken(del(k)) + def range(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] = blacken(rng(from, until)) def foreach[U](f: (A, B) => U) def toStream: Stream[(A,B)] def iterator: Iterator[(A, B)] - def upd[B1 >: B](k: A, v: B1): Tree[B1] - def del(k: A): Tree[B] + def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] + def del(k: A)(implicit ordering: Ordering[A]): Tree[B] def smallest: NonEmpty[B] def greatest: NonEmpty[B] - def rng(from: Option[A], until: Option[A]): Tree[B] + def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] def first : A def last : A def count : Int @@ -53,7 +51,7 @@ abstract class RedBlack[A] extends Serializable { def value: B def left: Tree[B] def right: Tree[B] - def lookup(k: A): Tree[B] = + def lookup(k: A)(implicit ordering: Ordering[A]): Tree[B] = if (ordering.lt(k, key)) left.lookup(k) else if (ordering.lt(key, k)) right.lookup(k) else this @@ -73,14 +71,14 @@ abstract class RedBlack[A] extends Serializable { case _ => mkTree(isBlack, x, xv, a, r) } - def upd[B1 >: B](k: A, v: B1): Tree[B1] = { + def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] = { if (ordering.lt(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) else if (ordering.lt(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) else mkTree(isBlack, k, v, left, right) } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - def del(k: A): Tree[B] = { + def del(k: A)(implicit ordering: Ordering[A]): Tree[B] = { def 
balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) => RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d)) @@ -162,7 +160,7 @@ abstract class RedBlack[A] extends Serializable { right foreach f } - override def rng(from: Option[A], until: Option[A]): Tree[B] = { + override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] = { if (from == None && until == None) return this if (from != None && ordering.lt(key, from.get)) return right.rng(from, until); if (until != None && (ordering.lt(until.get,key) || !ordering.lt(key,until.get))) @@ -266,9 +264,9 @@ abstract class RedBlack[A] extends Serializable { case object Empty extends Tree[Nothing] { def isEmpty = true def isBlack = true - def lookup(k: A): Tree[Nothing] = this - def upd[B](k: A, v: B): Tree[B] = RedTree(k, v, Empty, Empty) - def del(k: A): Tree[Nothing] = this + def lookup(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this + def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[B] = RedTree(k, v, Empty, Empty) + def del(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") def greatest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") def iterator: Iterator[(A, Nothing)] = Iterator.empty @@ -276,7 +274,7 @@ abstract class RedBlack[A] extends Serializable { def foreach[U](f: (A, Nothing) => U) {} - def rng(from: Option[A], until: Option[A]) = this + def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) = this def first = throw new NoSuchElementException("empty map") def last = throw new NoSuchElementException("empty map") def count = 0 -- cgit v1.2.3 From a02a81574ea2329dd04241abcba8f8fba40e61ac Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 19 Dec 2011 23:00:06 +0100 Subject: Moved from Empty case object to case class in preparation of moving type parameter A. 
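The motivation here is a language constraint rather than performance: a Scala object cannot take type parameters, so once the key type A moves from the enclosing RedBlack[A] onto Tree itself (the next commits), the empty tree can no longer be a plain case object. A sketch, with hypothetical names, of the shape being prepared for:

    object EmptyParamSketch {
      // Target shape: the key type is a parameter of Tree, not of an enclosing class.
      sealed abstract class Tree[A, +B]
      final case class Node[A, +B](key: A, value: B,
                                   left: Tree[A, B], right: Tree[A, B]) extends Tree[A, B]

      // `case object Empty[A]` is not legal Scala, but a parameterless case class is.
      // (A later commit in this series replaces even this with one shared instance.)
      final case class Empty[A]() extends Tree[A, Nothing]

      def singleton[A, B](k: A, v: B): Tree[A, B] = Node(k, v, Empty[A](), Empty[A]())
    }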
--- .../scala/collection/immutable/RedBlack.scala | 26 +++++++++++----------- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- 3 files changed, 15 insertions(+), 15 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 8235ee9fb5..6ca5a286f4 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -124,8 +124,8 @@ abstract class RedBlack[A] extends Serializable { case _ => RedTree(key, value, left, right.del(k)) } def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match { - case (Empty, t) => t - case (t, Empty) => t + case (Empty(), t) => t + case (t, Empty()) => t case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => append(b, c) match { case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d)) @@ -168,8 +168,8 @@ abstract class RedBlack[A] extends Serializable { val newLeft = left.rng(from, None) val newRight = right.rng(None, until) if ((newLeft eq left) && (newRight eq right)) this - else if (newLeft eq Empty) newRight.upd(key, value); - else if (newRight eq Empty) newLeft.upd(key, value); + else if (newLeft.isEmpty) newRight.upd(key, value); + else if (newRight.isEmpty) newLeft.upd(key, value); else rebalance(newLeft, newRight) } @@ -188,7 +188,7 @@ abstract class RedBlack[A] extends Serializable { val next = if (leftMost) zipper.head.left else zipper.head.right next match { case node: NonEmpty[_] => unzip(node :: zipper, leftMost) - case Empty => zipper + case Empty() => zipper } } @@ -207,12 +207,12 @@ abstract class RedBlack[A] extends Serializable { unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) case (l @ RedTree(_, _, _, _), _) => unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) - case (Empty, Empty) => + case (Empty(), Empty()) => (Nil, true, false, smallerDepth) - case (Empty, r @ BlackTree(_, _, _, _)) => + case (Empty(), r @ BlackTree(_, _, _, _)) => val leftMost = true (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) - case (l @ BlackTree(_, _, _, _), Empty) => + case (l @ BlackTree(_, _, _, _), Empty()) => val leftMost = false (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) } @@ -261,11 +261,11 @@ abstract class RedBlack[A] extends Serializable { else this } } - case object Empty extends Tree[Nothing] { + case class Empty() extends Tree[Nothing] { def isEmpty = true def isBlack = true def lookup(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this - def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[B] = RedTree(k, v, Empty, Empty) + def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[B] = RedTree(k, v, this, this) def del(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") def greatest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") @@ -299,19 +299,19 @@ abstract class RedBlack[A] extends Serializable { override def hasNext: Boolean = !next.isEmpty override def next: (A, B) = next match { - case Empty => + case Empty() => throw new NoSuchElementException("next on empty iterator") case tree: NonEmpty[B] => val result = (tree.key, tree.value) addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) Empty else path.pop() + next = if (path.isEmpty) Empty() else path.pop() result } @annotation.tailrec 
private[this] def addLeftMostBranchToPath(tree: Tree[B]) { tree match { - case Empty => + case Empty() => case tree: NonEmpty[B] => path.push(tree) addLeftMostBranchToPath(tree.left) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index da2aef1c22..bdb4533faa 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -60,7 +60,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack[A]#Tree[B] = if (size == 0) Empty else t + protected val tree: RedBlack[A]#Tree[B] = if (size == 0) Empty() else t override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { val ntree = tree.range(from,until) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 7fb333959e..7b90d6d9c3 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -101,7 +101,7 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack[A]#Tree[Unit] = if (size == 0) Empty else t + protected val tree: RedBlack[A]#Tree[Unit] = if (size == 0) Empty() else t private def newSet(s: Int, t: RedBlack[A]#Tree[Unit]) = new TreeSet[A](s, t) -- cgit v1.2.3 From 418adc642cbde26c09fe8ee24e019d89f6b123f9 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 19 Dec 2011 23:08:06 +0100 Subject: Moved type parameter A from RedBlack to Tree. --- .../scala/collection/immutable/RedBlack.scala | 124 ++++++++++----------- .../scala/collection/immutable/TreeMap.scala | 10 +- .../scala/collection/immutable/TreeSet.scala | 8 +- 3 files changed, 71 insertions(+), 71 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 6ca5a286f4..3fbe9a3407 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -16,46 +16,46 @@ package immutable * @since 2.3 */ @SerialVersionUID(8691885935445612921L) -abstract class RedBlack[A] extends Serializable { +abstract class RedBlack extends Serializable { - private def blacken[B](t: Tree[B]): Tree[B] = t match { + private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { case RedTree(k, v, l, r) => BlackTree(k, v, l, r) case t => t } - private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) = + private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - abstract class Tree[+B] extends Serializable { + abstract class Tree[A, +B] extends Serializable { def isEmpty: Boolean def isBlack: Boolean - def lookup(x: A)(implicit ordering: Ordering[A]): Tree[B] - def update[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] = blacken(upd(k, v)) - def delete(k: A)(implicit ordering: Ordering[A]): Tree[B] = blacken(del(k)) - def range(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] = blacken(rng(from, until)) + def lookup(x: A)(implicit ordering: Ordering[A]): Tree[A, B] + def update[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(k, v)) + def delete(k: 
A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(k)) + def range(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(from, until)) def foreach[U](f: (A, B) => U) def toStream: Stream[(A,B)] def iterator: Iterator[(A, B)] - def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] - def del(k: A)(implicit ordering: Ordering[A]): Tree[B] - def smallest: NonEmpty[B] - def greatest: NonEmpty[B] - def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] + def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] + def del(k: A)(implicit ordering: Ordering[A]): Tree[A, B] + def smallest: NonEmpty[A, B] + def greatest: NonEmpty[A, B] + def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] def first : A def last : A def count : Int - protected[immutable] def nth(n: Int): NonEmpty[B] + protected[immutable] def nth(n: Int): NonEmpty[A, B] } - abstract class NonEmpty[+B] extends Tree[B] with Serializable { + abstract class NonEmpty[A, +B] extends Tree[A, B] with Serializable { def isEmpty = false def key: A def value: B - def left: Tree[B] - def right: Tree[B] - def lookup(k: A)(implicit ordering: Ordering[A]): Tree[B] = + def left: Tree[A, B] + def right: Tree[A, B] + def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (ordering.lt(k, key)) left.lookup(k) else if (ordering.lt(key, k)) right.lookup(k) else this - private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match { + private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = l match { case RedTree(y, yv, RedTree(x, xv, a, b), c) => RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) case RedTree(x, xv, a, RedTree(y, yv, b, c)) => @@ -63,7 +63,7 @@ abstract class RedBlack[A] extends Serializable { case _ => mkTree(isBlack, z, zv, l, d) } - private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match { + private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1])/*: NonEmpty[A, B1]*/ = r match { case RedTree(z, zv, RedTree(y, yv, b, c), d) => RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) case RedTree(y, yv, b, RedTree(z, zv, c, d)) => @@ -71,15 +71,15 @@ abstract class RedBlack[A] extends Serializable { case _ => mkTree(isBlack, x, xv, a, r) } - def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[B1] = { + def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = { if (ordering.lt(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) else if (ordering.lt(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) else mkTree(isBlack, k, v, left, right) } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - def del(k: A)(implicit ordering: Ordering[A]): Tree[B] = { - def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + def del(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) => RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d)) case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) => @@ -93,11 +93,11 @@ abstract class RedBlack[A] extends 
Serializable { case (a, b) => BlackTree(x, xv, a, b) } - def subl(t: Tree[B]) = t match { + def subl(t: Tree[A, B]) = t match { case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b) case _ => sys.error("Defect: invariance violation; expected black, got "+t) } - def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { case (RedTree(y, yv, a, b), c) => RedTree(x, xv, BlackTree(y, yv, a, b), c) case (bl, BlackTree(y, yv, a, b)) => @@ -106,7 +106,7 @@ abstract class RedBlack[A] extends Serializable { RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c))) case _ => sys.error("Defect: invariance violation at "+right) } - def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { case (a, RedTree(y, yv, b, c)) => RedTree(x, xv, a, BlackTree(y, yv, b, c)) case (BlackTree(y, yv, a, b), bl) => @@ -116,14 +116,14 @@ abstract class RedBlack[A] extends Serializable { case _ => sys.error("Defect: invariance violation at "+left) } def delLeft = left match { - case _: BlackTree[_] => balLeft(key, value, left.del(k), right) + case _: BlackTree[_, _] => balLeft(key, value, left.del(k), right) case _ => RedTree(key, value, left.del(k), right) } def delRight = right match { - case _: BlackTree[_] => balRight(key, value, left, right.del(k)) + case _: BlackTree[_, _] => balRight(key, value, left, right.del(k)) case _ => RedTree(key, value, left, right.del(k)) } - def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match { + def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = (tl, tr) match { case (Empty(), t) => t case (t, Empty()) => t case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => @@ -147,8 +147,8 @@ abstract class RedBlack[A] extends Serializable { } } - def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest - def greatest: NonEmpty[B] = if (right.isEmpty) this else right.greatest + def smallest: NonEmpty[A, B] = if (left.isEmpty) this else left.smallest + def greatest: NonEmpty[A, B] = if (right.isEmpty) this else right.greatest def toStream: Stream[(A,B)] = iterator.toStream @@ -160,7 +160,7 @@ abstract class RedBlack[A] extends Serializable { right foreach f } - override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[B] = { + override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { if (from == None && until == None) return this if (from != None && ordering.lt(key, from.get)) return right.rng(from, until); if (until != None && (ordering.lt(until.get,key) || !ordering.lt(key,until.get))) @@ -182,23 +182,23 @@ abstract class RedBlack[A] extends Serializable { // whether the zipper was traversed left-most or right-most. 
// If the trees were balanced, returns an empty zipper - private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = { + private[this] def compareDepth(left: Tree[A, B], right: Tree[A, B]): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = { // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = { + def unzip(zipper: List[NonEmpty[A, B]], leftMost: Boolean): List[NonEmpty[A, B]] = { val next = if (leftMost) zipper.head.left else zipper.head.right next match { - case node: NonEmpty[_] => unzip(node :: zipper, leftMost) - case Empty() => zipper + case node: NonEmpty[_, _] => unzip(node :: zipper, leftMost) + case Empty() => zipper } } // Unzip left tree on the rightmost side and right tree on the leftmost side until one is // found to be deeper, or the bottom is reached - def unzipBoth(left: Tree[B], - right: Tree[B], - leftZipper: List[NonEmpty[B]], - rightZipper: List[NonEmpty[B]], - smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match { + def unzipBoth(left: Tree[A, B], + right: Tree[A, B], + leftZipper: List[NonEmpty[A, B]], + rightZipper: List[NonEmpty[A, B]], + smallerDepth: Int): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = (left, right) match { case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) => unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1) case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) => @@ -219,9 +219,9 @@ abstract class RedBlack[A] extends Serializable { unzipBoth(left, right, Nil, Nil, 0) } - private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = { + private[this] def rebalance(newLeft: Tree[A, B], newRight: Tree[A, B]) = { // This is like drop(n-1), but only counting black nodes - def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match { + def findDepth(zipper: List[NonEmpty[A, B]], depth: Int): List[NonEmpty[A, B]] = zipper match { case BlackTree(_, _, _, _) :: tail => if (depth == 1) zipper else findDepth(tail, depth - 1) case _ :: tail => findDepth(tail, depth) @@ -243,7 +243,7 @@ abstract class RedBlack[A] extends Serializable { } else { RedTree(key, value, zipFrom.head, blkNewRight) } - val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) => + val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) => if (leftMost) balanceLeft(node.isBlack, node.key, node.value, tree, node.right) else @@ -261,14 +261,14 @@ abstract class RedBlack[A] extends Serializable { else this } } - case class Empty() extends Tree[Nothing] { + case class Empty[A]() extends Tree[A, Nothing] { def isEmpty = true def isBlack = true - def lookup(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this - def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[B] = RedTree(k, v, this, this) - def del(k: A)(implicit ordering: Ordering[A]): Tree[Nothing] = this - def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") - def greatest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") + def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this + def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[A, B] = RedTree(k, v, this, this) + def del(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this + def smallest: NonEmpty[A, Nothing] = throw new NoSuchElementException("empty map") + def greatest: NonEmpty[A, Nothing] = throw new 
NoSuchElementException("empty map") def iterator: Iterator[(A, Nothing)] = Iterator.empty def toStream: Stream[(A,Nothing)] = Stream.empty @@ -280,20 +280,20 @@ abstract class RedBlack[A] extends Serializable { def count = 0 protected[immutable] def nth(n: Int) = throw new NoSuchElementException("empty map") } - case class RedTree[+B](override val key: A, + case class RedTree[A, +B](override val key: A, override val value: B, - override val left: Tree[B], - override val right: Tree[B]) extends NonEmpty[B] { + override val left: Tree[A, B], + override val right: Tree[A, B]) extends NonEmpty[A, B] { def isBlack = false } - case class BlackTree[+B](override val key: A, + case class BlackTree[A, +B](override val key: A, override val value: B, - override val left: Tree[B], - override val right: Tree[B]) extends NonEmpty[B] { + override val left: Tree[A, B], + override val right: Tree[A, B]) extends NonEmpty[A, B] { def isBlack = true } - private[this] class TreeIterator[B](tree: NonEmpty[B]) extends Iterator[(A, B)] { + private[this] class TreeIterator[A, B](tree: NonEmpty[A, B]) extends Iterator[(A, B)] { import collection.mutable.Stack override def hasNext: Boolean = !next.isEmpty @@ -301,7 +301,7 @@ abstract class RedBlack[A] extends Serializable { override def next: (A, B) = next match { case Empty() => throw new NoSuchElementException("next on empty iterator") - case tree: NonEmpty[B] => + case tree: NonEmpty[A, B] => val result = (tree.key, tree.value) addLeftMostBranchToPath(tree.right) next = if (path.isEmpty) Empty() else path.pop() @@ -309,17 +309,17 @@ abstract class RedBlack[A] extends Serializable { } @annotation.tailrec - private[this] def addLeftMostBranchToPath(tree: Tree[B]) { + private[this] def addLeftMostBranchToPath(tree: Tree[A, B]) { tree match { case Empty() => - case tree: NonEmpty[B] => + case tree: NonEmpty[A, B] => path.push(tree) addLeftMostBranchToPath(tree.left) } } - private[this] val path: Stack[NonEmpty[B]] = Stack.empty[NonEmpty[B]] + private[this] val path: Stack[NonEmpty[A, B]] = Stack.empty[NonEmpty[A, B]] addLeftMostBranchToPath(tree) - private[this] var next: Tree[B] = path.pop() + private[this] var next: Tree[A, B] = path.pop() } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index bdb4533faa..3dfda05e17 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -23,7 +23,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) /** $sortedMapCanBuildFromInfo */ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B] - private def make[A, B](s: Int, t: RedBlack[A]#Tree[B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord) + private def make[A, B](s: Int, t: RedBlack#Tree[A, B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord) } /** This class implements immutable maps using a tree. 
@@ -46,8 +46,8 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit val ordering: Ordering[A]) - extends RedBlack[A] +class TreeMap[A, +B](override val size: Int, t: RedBlack#Tree[A, B])(implicit val ordering: Ordering[A]) + extends RedBlack with SortedMap[A, B] with SortedMapLike[A, B, TreeMap[A, B]] with MapLike[A, B, TreeMap[A, B]] @@ -60,7 +60,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack[A]#Tree[B] = if (size == 0) Empty() else t + protected val tree: RedBlack#Tree[A, B] = if (size == 0) Empty() else t override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { val ntree = tree.range(from,until) @@ -194,7 +194,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack[A]#Tree[B])(implicit va * @return the value of the mapping, if it exists */ override def get(key: A): Option[B] = tree.lookup(key) match { - case n: NonEmpty[b] => Some(n.value) + case n: NonEmpty[_, _] => Some(n.value) case _ => None } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 7b90d6d9c3..47a28f88df 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -47,9 +47,9 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @define willNotTerminateInf */ @SerialVersionUID(-234066569443569402L) -class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) +class TreeSet[A](override val size: Int, t: RedBlack#Tree[A, Unit]) (implicit val ordering: Ordering[A]) - extends RedBlack[A] with SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { + extends RedBlack with SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { override def stringPrefix = "TreeSet" @@ -101,9 +101,9 @@ class TreeSet[A](override val size: Int, t: RedBlack[A]#Tree[Unit]) def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack[A]#Tree[Unit] = if (size == 0) Empty() else t + protected val tree: RedBlack#Tree[A, Unit] = if (size == 0) Empty() else t - private def newSet(s: Int, t: RedBlack[A]#Tree[Unit]) = new TreeSet[A](s, t) + private def newSet(s: Int, t: RedBlack#Tree[A, Unit]) = new TreeSet[A](s, t) /** A factory to create empty sets of the same type of keys. */ -- cgit v1.2.3 From 6c0e0362be6c37ed4531d8cfca15c6e516d5f0f8 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 19 Dec 2011 23:10:32 +0100 Subject: Changed abstract class RedBlack to singleton object. 
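With RedBlack turned into an object, Tree is no longer an inner class of an abstract base that TreeMap and TreeSet have to extend: they now refer to the plain nested type RedBlack.Tree[A, B] instead of the path-dependent RedBlack[A]#Tree[B], and simply import the members they need. This is what the earlier switch from the abstract isSmaller method to an implicit Ordering parameter made possible, since the tree operations no longer depend on any per-instance state. A small sketch (illustrative names, not the real library object) of what this looks like from client code:

    // Stand-in for the real RedBlack object; the point is only that Tree is a
    // plain nested type that needs no enclosing instance.
    object RedBlackish {
      sealed abstract class Tree[A, +B]
      final case class Leaf[A]() extends Tree[A, Nothing]
      final case class Node[A, +B](key: A, value: B,
                                   left: Tree[A, B], right: Tree[A, B]) extends Tree[A, B]
    }

    object Client {
      import RedBlackish._

      // No `extends RedBlackish` and no outer#Tree[B] type projection needed.
      def size[A, B](t: Tree[A, B]): Int = t match {
        case Leaf()           => 0
        case Node(_, _, l, r) => 1 + size(l) + size(r)
      }
    }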
--- src/library/scala/collection/immutable/RedBlack.scala | 2 +- src/library/scala/collection/immutable/TreeMap.scala | 11 ++++++----- src/library/scala/collection/immutable/TreeSet.scala | 10 ++++++---- 3 files changed, 13 insertions(+), 10 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 3fbe9a3407..4069c86c57 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -16,7 +16,7 @@ package immutable * @since 2.3 */ @SerialVersionUID(8691885935445612921L) -abstract class RedBlack extends Serializable { +object RedBlack extends Serializable { private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { case RedTree(k, v, l, r) => BlackTree(k, v, l, r) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 3dfda05e17..a13e78086b 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -23,7 +23,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) /** $sortedMapCanBuildFromInfo */ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B] - private def make[A, B](s: Int, t: RedBlack#Tree[A, B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord) + private def make[A, B](s: Int, t: RedBlack.Tree[A, B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord) } /** This class implements immutable maps using a tree. @@ -46,13 +46,14 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -class TreeMap[A, +B](override val size: Int, t: RedBlack#Tree[A, B])(implicit val ordering: Ordering[A]) - extends RedBlack - with SortedMap[A, B] +class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit val ordering: Ordering[A]) + extends SortedMap[A, B] with SortedMapLike[A, B, TreeMap[A, B]] with MapLike[A, B, TreeMap[A, B]] with Serializable { + import RedBlack._ + def isSmaller(x: A, y: A) = ordering.lt(x, y) override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = @@ -60,7 +61,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack#Tree[A, B])(implicit va def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack#Tree[A, B] = if (size == 0) Empty() else t + protected val tree: RedBlack.Tree[A, B] = if (size == 0) Empty() else t override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { val ntree = tree.range(from,until) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 47a28f88df..8462ae5af3 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -47,9 +47,11 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @define willNotTerminateInf */ @SerialVersionUID(-234066569443569402L) -class TreeSet[A](override val size: Int, t: RedBlack#Tree[A, Unit]) +class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) (implicit val ordering: Ordering[A]) - extends RedBlack with SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { + extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with 
Serializable { + + import RedBlack._ override def stringPrefix = "TreeSet" @@ -101,9 +103,9 @@ class TreeSet[A](override val size: Int, t: RedBlack#Tree[A, Unit]) def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack#Tree[A, Unit] = if (size == 0) Empty() else t + protected val tree: RedBlack.Tree[A, Unit] = if (size == 0) Empty() else t - private def newSet(s: Int, t: RedBlack#Tree[A, Unit]) = new TreeSet[A](s, t) + private def newSet(s: Int, t: RedBlack.Tree[A, Unit]) = new TreeSet[A](s, t) /** A factory to create empty sets of the same type of keys. */ -- cgit v1.2.3 From d2706db10c63851e549ef7ce4cbaff364c59fbc3 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 20 Dec 2011 00:01:04 +0100 Subject: Use single shared Empty instance across all RedBlack trees. --- .../scala/collection/immutable/RedBlack.scala | 51 +++++++++++++--------- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- 3 files changed, 32 insertions(+), 23 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 4069c86c57..fad4f7fd53 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -15,7 +15,7 @@ package immutable * * @since 2.3 */ -@SerialVersionUID(8691885935445612921L) +private[immutable] object RedBlack extends Serializable { private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { @@ -25,7 +25,7 @@ object RedBlack extends Serializable { private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - abstract class Tree[A, +B] extends Serializable { + sealed abstract class Tree[A, +B] extends Serializable { def isEmpty: Boolean def isBlack: Boolean def lookup(x: A)(implicit ordering: Ordering[A]): Tree[A, B] @@ -45,7 +45,7 @@ object RedBlack extends Serializable { def count : Int protected[immutable] def nth(n: Int): NonEmpty[A, B] } - abstract class NonEmpty[A, +B] extends Tree[A, B] with Serializable { + sealed abstract class NonEmpty[A, +B] extends Tree[A, B] with Serializable { def isEmpty = false def key: A def value: B @@ -124,8 +124,8 @@ object RedBlack extends Serializable { case _ => RedTree(key, value, left, right.del(k)) } def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = (tl, tr) match { - case (Empty(), t) => t - case (t, Empty()) => t + case (Empty.Instance, t) => t + case (t, Empty.Instance) => t case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => append(b, c) match { case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d)) @@ -147,8 +147,8 @@ object RedBlack extends Serializable { } } - def smallest: NonEmpty[A, B] = if (left.isEmpty) this else left.smallest - def greatest: NonEmpty[A, B] = if (right.isEmpty) this else right.greatest + def smallest: NonEmpty[A, B] = if (left eq Empty.Instance) this else left.smallest + def greatest: NonEmpty[A, B] = if (right eq Empty.Instance) this else right.greatest def toStream: Stream[(A,B)] = iterator.toStream @@ -168,8 +168,8 @@ object RedBlack extends Serializable { val newLeft = left.rng(from, None) val newRight = right.rng(None, until) if ((newLeft eq left) && (newRight eq right)) this - else if (newLeft.isEmpty) newRight.upd(key, value); - else if (newRight.isEmpty) newLeft.upd(key, value); + else if (newLeft eq Empty.Instance) newRight.upd(key, value); + 
else if (newRight eq Empty.Instance) newLeft.upd(key, value); else rebalance(newLeft, newRight) } @@ -188,7 +188,7 @@ object RedBlack extends Serializable { val next = if (leftMost) zipper.head.left else zipper.head.right next match { case node: NonEmpty[_, _] => unzip(node :: zipper, leftMost) - case Empty() => zipper + case _ => zipper } } @@ -207,12 +207,12 @@ object RedBlack extends Serializable { unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) case (l @ RedTree(_, _, _, _), _) => unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) - case (Empty(), Empty()) => + case (Empty.Instance, Empty.Instance) => (Nil, true, false, smallerDepth) - case (Empty(), r @ BlackTree(_, _, _, _)) => + case (Empty.Instance, r @ BlackTree(_, _, _, _)) => val leftMost = true (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) - case (l @ BlackTree(_, _, _, _), Empty()) => + case (l @ BlackTree(_, _, _, _), Empty.Instance) => val leftMost = false (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) } @@ -252,8 +252,8 @@ object RedBlack extends Serializable { zippedTree } } - def first = if (left .isEmpty) key else left.first - def last = if (right.isEmpty) key else right.last + def first = if (left eq Empty.Instance) key else left.first + def last = if (right eq Empty.Instance) key else right.last val count = 1 + left.count + right.count protected[immutable] def nth(n: Int) = { if (n < left.count) left.nth(n) @@ -261,7 +261,12 @@ object RedBlack extends Serializable { else this } } - case class Empty[A]() extends Tree[A, Nothing] { + object Empty { + def empty[A]: Tree[A, Nothing] = Instance.asInstanceOf[Tree[A, Nothing]] + + val Instance: Tree[_ >: Nothing, Nothing] = Empty[Nothing]() + } + final case class Empty[A] private () extends Tree[A, Nothing] { def isEmpty = true def isBlack = true def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this @@ -279,14 +284,18 @@ object RedBlack extends Serializable { def last = throw new NoSuchElementException("empty map") def count = 0 protected[immutable] def nth(n: Int) = throw new NoSuchElementException("empty map") + + override def toString() = "Empty" + + private def readResolve() = Empty.empty } - case class RedTree[A, +B](override val key: A, + final case class RedTree[A, +B](override val key: A, override val value: B, override val left: Tree[A, B], override val right: Tree[A, B]) extends NonEmpty[A, B] { def isBlack = false } - case class BlackTree[A, +B](override val key: A, + final case class BlackTree[A, +B](override val key: A, override val value: B, override val left: Tree[A, B], override val right: Tree[A, B]) extends NonEmpty[A, B] { @@ -299,19 +308,19 @@ object RedBlack extends Serializable { override def hasNext: Boolean = !next.isEmpty override def next: (A, B) = next match { - case Empty() => + case Empty.Instance => throw new NoSuchElementException("next on empty iterator") case tree: NonEmpty[A, B] => val result = (tree.key, tree.value) addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) Empty() else path.pop() + next = if (path.isEmpty) Empty.empty else path.pop() result } @annotation.tailrec private[this] def addLeftMostBranchToPath(tree: Tree[A, B]) { tree match { - case Empty() => + case Empty.Instance => case tree: NonEmpty[A, B] => path.push(tree) addLeftMostBranchToPath(tree.left) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index a13e78086b..bab60f06fb 100644 --- 
a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -61,7 +61,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit va def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack.Tree[A, B] = if (size == 0) Empty() else t + protected val tree: RedBlack.Tree[A, B] = if (size == 0) Empty.empty else t override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { val ntree = tree.range(from,until) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 8462ae5af3..2e6ba17749 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -103,7 +103,7 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - protected val tree: RedBlack.Tree[A, Unit] = if (size == 0) Empty() else t + protected val tree: RedBlack.Tree[A, Unit] = if (size == 0) Empty.empty else t private def newSet(s: Int, t: RedBlack.Tree[A, Unit]) = new TreeSet[A](s, t) -- cgit v1.2.3 From 6b950741c58938eab922908ac4fb809b7ca68c01 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Wed, 21 Dec 2011 09:28:42 +0100 Subject: Make sure the redblack test compiles and runs. --- .../scala/collection/immutable/RedBlack.scala | 3 +- test/files/scalacheck/redblack.scala | 76 ++++++++++------------ 2 files changed, 37 insertions(+), 42 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index fad4f7fd53..4b81182657 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -11,11 +11,10 @@ package scala.collection package immutable -/** A base class containing the implementations for `TreeMaps` and `TreeSets`. +/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. 
* * @since 2.3 */ -private[immutable] object RedBlack extends Serializable { private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala index 1fcaa46f0e..011a5d0ca5 100644 --- a/test/files/scalacheck/redblack.scala +++ b/test/files/scalacheck/redblack.scala @@ -18,22 +18,18 @@ abstract class RedBlackTest extends Properties("RedBlack") { def minimumSize = 0 def maximumSize = 5 - object RedBlackTest extends scala.collection.immutable.RedBlack[String] { - def isSmaller(x: String, y: String) = x < y - } - - import RedBlackTest._ + import collection.immutable.RedBlack._ - def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) + def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) Some(tree.iterator.drop(n).next) else None - def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key + def treeContains[A](tree: Tree[String, A], key: String) = tree.iterator.map(_._1) contains key - def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] = + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = if (level == 0) { - value(Empty) + value(Empty.empty) } else { for { oddOrEven <- choose(0, 2) @@ -56,10 +52,10 @@ abstract class RedBlackTest extends Properties("RedBlack") { } yield tree type ModifyParm - def genParm(tree: Tree[Int]): Gen[ModifyParm] - def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] + def genParm(tree: Tree[String, Int]): Gen[ModifyParm] + def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] - def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for { + def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for { tree <- genTree parm <- genParm(tree) } yield (tree, parm, modify(tree, parm)) @@ -68,30 +64,30 @@ abstract class RedBlackTest extends Properties("RedBlack") { trait RedBlackInvariants { self: RedBlackTest => - import RedBlackTest._ + import collection.immutable.RedBlack._ - def rootIsBlack[A](t: Tree[A]) = t.isBlack + def rootIsBlack[A](t: Tree[String, A]) = t.isBlack - def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match { - case Empty => t.isBlack - case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack + def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match { + case Empty.Instance => t.isBlack + case ne: NonEmpty[_, _] => List(ne.left, ne.right) forall areAllLeavesBlack } - def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match { + def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match { case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t)) case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack - case Empty => true + case Empty.Instance => true } - def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match { - case Empty => List(1) + def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match { + case Empty.Instance => List(1) case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves } - def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match { - case Empty => true - case ne: NonEmpty[_] => + def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match { + 
case Empty.Instance => true + case ne: NonEmpty[_, _] => ( blackNodesToLeaves(ne).distinct.size == 1 && areBlackNodesToLeavesEqual(ne.left) @@ -99,10 +95,10 @@ trait RedBlackInvariants { ) } - def orderIsPreserved[A](t: Tree[A]): Boolean = - t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) } + def orderIsPreserved[A](t: Tree[String, A]): Boolean = + t.iterator zip t.iterator.drop(1) forall { case (x, y) => x._1 < y._1 } - def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => + def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => invariant(newTree) } @@ -114,13 +110,13 @@ trait RedBlackInvariants { } object TestInsert extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ + import collection.immutable.RedBlack._ override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0) + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = tree update (generateKey(tree, parm), 0) - def generateKey(tree: Tree[Int], parm: ModifyParm): String = nodeAt(tree, parm) match { + def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { case Some((key, _)) => key.init.mkString + "MN" case None => nodeAt(tree, parm - 1) match { case Some((key, _)) => key.init.mkString + "RN" @@ -134,13 +130,13 @@ object TestInsert extends RedBlackTest with RedBlackInvariants { } object TestModify extends RedBlackTest { - import RedBlackTest._ + import collection.immutable.RedBlack._ def newValue = 1 override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { case (key, _) => tree update (key, newValue) } getOrElse tree @@ -152,12 +148,12 @@ object TestModify extends RedBlackTest { } object TestDelete extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ + import collection.immutable.RedBlack._ override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { case (key, _) => tree delete key } getOrElse tree @@ -169,17 +165,17 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { } object TestRange extends RedBlackTest with RedBlackInvariants { - import RedBlackTest._ + import collection.immutable.RedBlack._ override type ModifyParm = (Option[Int], Option[Int]) - override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for { + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { from <- choose(0, tree.iterator.size) to <- choose(0, tree.iterator.size) 
suchThat (from <=) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug } yield (optionalFrom, optionalTo) - override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = { + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) tree range (from, to) -- cgit v1.2.3 From b9699f999da24f72dca65ecfb066b0ac3151f2b5 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 27 Dec 2011 10:23:04 +0100 Subject: Made RedBlack private to the scala.collection.immutable package. Use ArrayStack instead of Stack in TreeIterator for slightly increased performance. --- src/library/scala/collection/immutable/RedBlack.scala | 7 +++---- test/files/scalacheck/redblack.scala | 15 +++++++++------ 2 files changed, 12 insertions(+), 10 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 4b81182657..19e0e5ae55 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -15,7 +15,8 @@ package immutable * * @since 2.3 */ -object RedBlack extends Serializable { +private[immutable] +object RedBlack { private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { case RedTree(k, v, l, r) => BlackTree(k, v, l, r) @@ -302,8 +303,6 @@ object RedBlack extends Serializable { } private[this] class TreeIterator[A, B](tree: NonEmpty[A, B]) extends Iterator[(A, B)] { - import collection.mutable.Stack - override def hasNext: Boolean = !next.isEmpty override def next: (A, B) = next match { @@ -326,7 +325,7 @@ object RedBlack extends Serializable { } } - private[this] val path: Stack[NonEmpty[A, B]] = Stack.empty[NonEmpty[A, B]] + private[this] val path = mutable.ArrayStack.empty[NonEmpty[A, B]] addLeftMostBranchToPath(tree) private[this] var next: Tree[A, B] = path.pop() } diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala index 011a5d0ca5..78fb645ce8 100644 --- a/test/files/scalacheck/redblack.scala +++ b/test/files/scalacheck/redblack.scala @@ -1,3 +1,4 @@ +import collection.immutable._ import org.scalacheck._ import Prop._ import Gen._ @@ -14,11 +15,12 @@ Both children of every red node are black. Every simple path from a given node to any of its descendant leaves contains the same number of black nodes. 
*/ +package scala.collection.immutable { abstract class RedBlackTest extends Properties("RedBlack") { def minimumSize = 0 def maximumSize = 5 - import collection.immutable.RedBlack._ + import RedBlack._ def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) Some(tree.iterator.drop(n).next) @@ -64,7 +66,7 @@ abstract class RedBlackTest extends Properties("RedBlack") { trait RedBlackInvariants { self: RedBlackTest => - import collection.immutable.RedBlack._ + import RedBlack._ def rootIsBlack[A](t: Tree[String, A]) = t.isBlack @@ -110,7 +112,7 @@ trait RedBlackInvariants { } object TestInsert extends RedBlackTest with RedBlackInvariants { - import collection.immutable.RedBlack._ + import RedBlack._ override type ModifyParm = Int override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) @@ -130,7 +132,7 @@ object TestInsert extends RedBlackTest with RedBlackInvariants { } object TestModify extends RedBlackTest { - import collection.immutable.RedBlack._ + import RedBlack._ def newValue = 1 override def minimumSize = 1 @@ -148,7 +150,7 @@ object TestModify extends RedBlackTest { } object TestDelete extends RedBlackTest with RedBlackInvariants { - import collection.immutable.RedBlack._ + import RedBlack._ override def minimumSize = 1 override type ModifyParm = Int @@ -165,7 +167,7 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { } object TestRange extends RedBlackTest with RedBlackInvariants { - import collection.immutable.RedBlack._ + import RedBlack._ override type ModifyParm = (Option[Int], Option[Int]) override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { @@ -199,6 +201,7 @@ object TestRange extends RedBlackTest with RedBlackInvariants { filteredTree == newTree.iterator.map(_._1).toList } } +} object Test extends Properties("RedBlack") { include(TestInsert) -- cgit v1.2.3 From 32171c27ec84bd770912149473a83e1b88c2ddc0 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 27 Dec 2011 14:49:28 +0100 Subject: TreeMap/TreeSet no longer keep track of the size (the RedBlack tree already does so). --- .../scala/collection/immutable/TreeMap.scala | 35 +++++++++------------ .../scala/collection/immutable/TreeSet.scala | 36 ++++++++++------------ 2 files changed, 31 insertions(+), 40 deletions(-) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index bab60f06fb..43c0d99875 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -23,7 +23,6 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { def empty[A, B](implicit ord: Ordering[A]) = new TreeMap[A, B]()(ord) /** $sortedMapCanBuildFromInfo */ implicit def canBuildFrom[A, B](implicit ord: Ordering[A]): CanBuildFrom[Coll, (A, B), TreeMap[A, B]] = new SortedMapCanBuildFrom[A, B] - private def make[A, B](s: Int, t: RedBlack.Tree[A, B])(implicit ord: Ordering[A]) = new TreeMap[A, B](s, t)(ord) } /** This class implements immutable maps using a tree. 
@@ -46,7 +45,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit val ordering: Ordering[A]) +class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: Ordering[A]) extends SortedMap[A, B] with SortedMapLike[A, B, TreeMap[A, B]] with MapLike[A, B, TreeMap[A, B]] @@ -59,32 +58,32 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit va override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = TreeMap.newBuilder[A, B] - def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) + override def size = tree.count - protected val tree: RedBlack.Tree[A, B] = if (size == 0) Empty.empty else t + def this()(implicit ordering: Ordering[A]) = this(RedBlack.Empty.empty)(ordering) override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { val ntree = tree.range(from,until) - new TreeMap[A,B](ntree.count, ntree) + new TreeMap[A,B](ntree) } - override def firstKey = t.first - override def lastKey = t.last + override def firstKey = tree.first + override def lastKey = tree.last override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) override def head = { - val smallest = t.smallest + val smallest = tree.smallest (smallest.key, smallest.value) } - override def headOption = if (t.isEmpty) None else Some(head) + override def headOption = if (tree.isEmpty) None else Some(head) override def last = { - val greatest = t.greatest + val greatest = tree.greatest (greatest.key, greatest.value) } - override def lastOption = if (t.isEmpty) None else Some(last) + override def lastOption = if (tree.isEmpty) None else Some(last) - override def tail = new TreeMap(size - 1, tree.delete(firstKey)) - override def init = new TreeMap(size - 1, tree.delete(lastKey)) + override def tail = new TreeMap(tree.delete(firstKey)) + override def init = new TreeMap(tree.delete(lastKey)) override def drop(n: Int) = { if (n <= 0) this @@ -135,10 +134,7 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit va * @param value the value to be associated with `key` * @return a new $coll with the updated binding */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { - val newsize = if (tree.lookup(key).isEmpty) size + 1 else size - TreeMap.make(newsize, tree.update(key, value)) - } + override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(tree.update(key, value)) /** Add a key/value pair to this map. * @tparam B1 type of the value of the new binding, a supertype of `B` @@ -180,13 +176,12 @@ class TreeMap[A, +B](override val size: Int, t: RedBlack.Tree[A, B])(implicit va */ def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { assert(tree.lookup(key).isEmpty) - TreeMap.make(size + 1, tree.update(key, value)) + new TreeMap(tree.update(key, value)) } def - (key:A): TreeMap[A, B] = if (tree.lookup(key).isEmpty) this - else if (size == 1) empty - else TreeMap.make(size - 1, tree.delete(key)) + else new TreeMap(tree.delete(key)) /** Check if this map maps `key` to a value and return the * value if it exists. 
diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 2e6ba17749..55d2c0b2c1 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -46,22 +46,23 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(-234066569443569402L) -class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) - (implicit val ordering: Ordering[A]) +@SerialVersionUID(-5685982407650748405L) +class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: Ordering[A]) extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { import RedBlack._ override def stringPrefix = "TreeSet" - override def head = t.smallest.key - override def headOption = if (t.isEmpty) None else Some(head) - override def last = t.greatest.key - override def lastOption = if (t.isEmpty) None else Some(last) + override def size = tree.count - override def tail = new TreeSet(size - 1, tree.delete(firstKey)) - override def init = new TreeSet(size - 1, tree.delete(lastKey)) + override def head = tree.smallest.key + override def headOption = if (tree.isEmpty) None else Some(head) + override def last = tree.greatest.key + override def lastOption = if (tree.isEmpty) None else Some(last) + + override def tail = new TreeSet(tree.delete(firstKey)) + override def init = new TreeSet(tree.delete(lastKey)) override def drop(n: Int) = { if (n <= 0) this @@ -101,11 +102,9 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) def isSmaller(x: A, y: A) = compare(x,y) < 0 - def this()(implicit ordering: Ordering[A]) = this(0, null)(ordering) - - protected val tree: RedBlack.Tree[A, Unit] = if (size == 0) Empty.empty else t + def this()(implicit ordering: Ordering[A]) = this(RedBlack.Empty.empty)(ordering) - private def newSet(s: Int, t: RedBlack.Tree[A, Unit]) = new TreeSet[A](s, t) + private def newSet(t: RedBlack.Tree[A, Unit]) = new TreeSet[A](t) /** A factory to create empty sets of the same type of keys. */ @@ -116,10 +115,7 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) * @param elem a new element to add. * @return a new $coll containing `elem` and all the elements of this $coll. */ - def + (elem: A): TreeSet[A] = { - val newsize = if (tree.lookup(elem).isEmpty) size + 1 else size - newSet(newsize, tree.update(elem, ())) - } + def + (elem: A): TreeSet[A] = newSet(tree.update(elem, ())) /** A new `TreeSet` with the entry added is returned, * assuming that elem is not in the TreeSet. @@ -129,7 +125,7 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) */ def insert(elem: A): TreeSet[A] = { assert(tree.lookup(elem).isEmpty) - newSet(size + 1, tree.update(elem, ())) + newSet(tree.update(elem, ())) } /** Creates a new `TreeSet` with the entry removed. @@ -139,7 +135,7 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) */ def - (elem:A): TreeSet[A] = if (tree.lookup(elem).isEmpty) this - else newSet(size - 1, tree delete elem) + else newSet(tree delete elem) /** Checks if this set contains element `elem`. 
* @@ -161,7 +157,7 @@ class TreeSet[A](override val size: Int, t: RedBlack.Tree[A, Unit]) override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = { val tree = this.tree.range(from, until) - newSet(tree.count, tree) + newSet(tree) } override def firstKey = tree.first override def lastKey = tree.last -- cgit v1.2.3 From b421bba4f570032a23623cfeff41198aabc1d614 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 27 Dec 2011 15:30:45 +0100 Subject: Performance improvements for iteration (foreach and iterator). --- .../scala/collection/immutable/RedBlack.scala | 59 +++++++++++++++++----- .../scala/collection/immutable/TreeMap.scala | 4 +- .../scala/collection/immutable/TreeSet.scala | 6 +-- 3 files changed, 51 insertions(+), 18 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 19e0e5ae55..8e10a8ac4d 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -32,9 +32,10 @@ object RedBlack { def update[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(k, v)) def delete(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(k)) def range(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(from, until)) - def foreach[U](f: (A, B) => U) - def toStream: Stream[(A,B)] + def foreach[U](f: ((A, B)) => U) + def foreachKey[U](f: A => U) def iterator: Iterator[(A, B)] + def keyIterator: Iterator[A] def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] def del(k: A)(implicit ordering: Ordering[A]): Tree[A, B] def smallest: NonEmpty[A, B] @@ -150,14 +151,19 @@ object RedBlack { def smallest: NonEmpty[A, B] = if (left eq Empty.Instance) this else left.smallest def greatest: NonEmpty[A, B] = if (right eq Empty.Instance) this else right.greatest - def toStream: Stream[(A,B)] = iterator.toStream - def iterator: Iterator[(A, B)] = new TreeIterator(this) + def keyIterator: Iterator[A] = new TreeKeyIterator(this) + + override def foreach[U](f: ((A, B)) => U) { + if (left ne Empty.Instance) left foreach f + f((key, value)) + if (right ne Empty.Instance) right foreach f + } - def foreach[U](f: (A, B) => U) { - left foreach f - f(key, value) - right foreach f + override def foreachKey[U](f: A => U) { + if (left ne Empty.Instance) left foreachKey f + f(key) + if (right ne Empty.Instance) right foreachKey f } override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { @@ -275,9 +281,10 @@ object RedBlack { def smallest: NonEmpty[A, Nothing] = throw new NoSuchElementException("empty map") def greatest: NonEmpty[A, Nothing] = throw new NoSuchElementException("empty map") def iterator: Iterator[(A, Nothing)] = Iterator.empty - def toStream: Stream[(A,Nothing)] = Stream.empty + def keyIterator: Iterator[A] = Iterator.empty - def foreach[U](f: (A, Nothing) => U) {} + override def foreach[U](f: ((A, Nothing)) => U) {} + override def foreachKey[U](f: A => U) {} def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) = this def first = throw new NoSuchElementException("empty map") @@ -303,16 +310,15 @@ object RedBlack { } private[this] class TreeIterator[A, B](tree: NonEmpty[A, B]) extends Iterator[(A, B)] { - override def hasNext: Boolean = !next.isEmpty + override def hasNext: Boolean = next ne Empty.Instance override def next: (A, B) = next match { case Empty.Instance => throw new 
NoSuchElementException("next on empty iterator") case tree: NonEmpty[A, B] => - val result = (tree.key, tree.value) addLeftMostBranchToPath(tree.right) next = if (path.isEmpty) Empty.empty else path.pop() - result + (tree.key, tree.value) } @annotation.tailrec @@ -329,4 +335,31 @@ object RedBlack { addLeftMostBranchToPath(tree) private[this] var next: Tree[A, B] = path.pop() } + + private[this] class TreeKeyIterator[A](tree: NonEmpty[A, _]) extends Iterator[A] { + override def hasNext: Boolean = next ne Empty.Instance + + override def next: A = next match { + case Empty.Instance => + throw new NoSuchElementException("next on empty iterator") + case tree: NonEmpty[A, _] => + addLeftMostBranchToPath(tree.right) + next = if (path.isEmpty) Empty.empty else path.pop() + tree.key + } + + @annotation.tailrec + private[this] def addLeftMostBranchToPath(tree: Tree[A, _]) { + tree match { + case Empty.Instance => + case tree: NonEmpty[A, _] => + path.push(tree) + addLeftMostBranchToPath(tree.left) + } + } + + private[this] val path = mutable.ArrayStack.empty[NonEmpty[A, _]] + addLeftMostBranchToPath(tree) + private[this] var next: Tree[A, _] = path.pop() + } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 43c0d99875..bb54688e72 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -201,9 +201,9 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: */ def iterator: Iterator[(A, B)] = tree.iterator - override def toStream: Stream[(A, B)] = tree.toStream + override def toStream: Stream[(A, B)] = tree.iterator.toStream - override def foreach[U](f : ((A,B)) => U) = tree foreach { case (x, y) => f(x, y) } + override def foreach[U](f : ((A,B)) => U) = tree foreach f } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 55d2c0b2c1..b9b5e12b1e 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -149,11 +149,11 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * * @return the new iterator */ - def iterator: Iterator[A] = tree.iterator map (_._1) + def iterator: Iterator[A] = tree.keyIterator - override def toStream: Stream[A] = tree.toStream map (_._1) + override def toStream: Stream[A] = tree.keyIterator.toStream - override def foreach[U](f: A => U) = tree foreach { (x, y) => f(x) } + override def foreach[U](f: A => U) = tree foreachKey f override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = { val tree = this.tree.range(from, until) -- cgit v1.2.3 From 4a0c4bbd092af26c8c6eea10e668e2cbc3c366a7 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 27 Dec 2011 15:44:33 +0100 Subject: Improved performance of RedBlack.NonEmpty.nth (helps take/drop/split/etc). 
--- src/library/scala/collection/immutable/RedBlack.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 8e10a8ac4d..3922aded5e 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -262,8 +262,9 @@ object RedBlack { def last = if (right eq Empty.Instance) key else right.last val count = 1 + left.count + right.count protected[immutable] def nth(n: Int) = { - if (n < left.count) left.nth(n) - else if (n > left.count) right.nth(n - left.count - 1) + val count = left.count + if (n < count) left.nth(n) + else if (n > count) right.nth(n - count - 1) else this } } -- cgit v1.2.3 From ad0b09c0c9606d43df7e3a76c535b3943e8d583a Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Wed, 28 Dec 2011 10:21:56 +0100 Subject: Added some tests for TreeMap/TreeSet. --- test/files/scalacheck/treemap.scala | 93 +++++++++++++++++++++++++++++++++++++ test/files/scalacheck/treeset.scala | 89 +++++++++++++++++++++++++++++++++++ 2 files changed, 182 insertions(+) create mode 100644 test/files/scalacheck/treemap.scala create mode 100644 test/files/scalacheck/treeset.scala diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala new file mode 100644 index 0000000000..43d307600d --- /dev/null +++ b/test/files/scalacheck/treemap.scala @@ -0,0 +1,93 @@ +import collection.immutable._ +import org.scalacheck._ +import Prop._ +import Gen._ +import Arbitrary._ +import util._ +import Buildable._ + +object Test extends Properties("TreeMap") { + implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary]: Arbitrary[TreeMap[A, B]] = + Arbitrary(for { + keys <- listOf(arbitrary[A]) + values <- listOfN(keys.size, arbitrary[B]) + } yield TreeMap(keys zip values: _*)) + + property("foreach/iterator consistency") = forAll { (subject: TreeMap[Int, String]) => + val it = subject.iterator + var consistent = true + subject.foreach { element => + consistent &&= it.hasNext && element == it.next + } + consistent + } + + property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> { + subject.zip(subject.tail).forall { case (x, y) => x._1 < y._1 } + }} + + property("contains all") = forAll { (arr: List[(Int, String)]) => + val subject = TreeMap(arr: _*) + arr.map(_._1).forall(subject.contains(_)) + } + + property("size") = forAll { (elements: List[(Int, Int)]) => + val subject = TreeMap(elements: _*) + elements.map(_._1).distinct.size == subject.size + } + + property("toSeq") = forAll { (elements: List[(Int, Int)]) => + val subject = TreeMap(elements: _*) + elements.map(_._1).distinct.sorted == subject.toSeq.map(_._1) + } + + property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { + val subject = TreeMap(elements zip elements: _*) + elements.min == subject.head._1 + }} + + property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { + val subject = TreeMap(elements zip elements: _*) + elements.max == subject.last._1 + }} + + property("head/tail identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { + subject == (subject.tail + subject.head) + }} + + property("init/last identity") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { + subject == (subject.init + subject.last) + }} + + property("take") = forAll { (subject: TreeMap[Int, String]) => + val n = choose(0, subject.size).sample.get + 
n == subject.take(n).size && subject.take(n).forall(elt => subject.get(elt._1) == Some(elt._2)) + } + + property("drop") = forAll { (subject: TreeMap[Int, String]) => + val n = choose(0, subject.size).sample.get + (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(elt => subject.get(elt._1) == Some(elt._2)) + } + + property("take/drop identity") = forAll { (subject: TreeMap[Int, String]) => + val n = choose(-1, subject.size + 1).sample.get + subject == subject.take(n) ++ subject.drop(n) + } + + property("splitAt") = forAll { (subject: TreeMap[Int, String]) => + val n = choose(-1, subject.size + 1).sample.get + val (prefix, suffix) = subject.splitAt(n) + prefix == subject.take(n) && suffix == subject.drop(n) + } + + property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { + val key = oneOf(subject.keys.toSeq).sample.get + val removed = subject - key + subject.contains(key) && !removed.contains(key) && subject.size - 1 == removed.size + }} + + property("remove all") = forAll { (subject: TreeMap[Int, String]) => + val result = subject.foldLeft(subject)((acc, elt) => acc - elt._1) + result.isEmpty + } +} diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala new file mode 100644 index 0000000000..3cefef7040 --- /dev/null +++ b/test/files/scalacheck/treeset.scala @@ -0,0 +1,89 @@ +import collection.immutable._ +import org.scalacheck._ +import Prop._ +import Gen._ +import Arbitrary._ +import util._ + +object Test extends Properties("TreeSet") { + implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = + Arbitrary(listOf(arbitrary[A]) map (elements => TreeSet(elements: _*))) + + property("foreach/iterator consistency") = forAll { (subject: TreeSet[Int]) => + val it = subject.iterator + var consistent = true + subject.foreach { element => + consistent &&= it.hasNext && element == it.next + } + consistent + } + + property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> { + subject.zip(subject.tail).forall { case (x, y) => x < y } + }} + + property("contains all") = forAll { (elements: List[Int]) => + val subject = TreeSet(elements: _*) + elements.forall(subject.contains) + } + + property("size") = forAll { (elements: List[Int]) => + val subject = TreeSet(elements: _*) + elements.distinct.size == subject.size + } + + property("toSeq") = forAll { (elements: List[Int]) => + val subject = TreeSet(elements: _*) + elements.distinct.sorted == subject.toSeq + } + + property("head") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { + val subject = TreeSet(elements: _*) + elements.min == subject.head + }} + + property("last") = forAll { (elements: List[Int]) => elements.nonEmpty ==> { + val subject = TreeSet(elements: _*) + elements.max == subject.last + }} + + property("head/tail identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { + subject == (subject.tail + subject.head) + }} + + property("init/last identity") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { + subject == (subject.init + subject.last) + }} + + property("take") = forAll { (subject: TreeSet[Int]) => + val n = choose(0, subject.size).sample.get + n == subject.take(n).size && subject.take(n).forall(subject.contains) + } + + property("drop") = forAll { (subject: TreeSet[Int]) => + val n = choose(0, subject.size).sample.get + (subject.size - n) == subject.drop(n).size && subject.drop(n).forall(subject.contains) + } + + property("take/drop identity") = forAll { (subject: 
TreeSet[Int]) => + val n = choose(-1, subject.size + 1).sample.get + subject == subject.take(n) ++ subject.drop(n) + } + + property("splitAt") = forAll { (subject: TreeSet[Int]) => + val n = choose(-1, subject.size + 1).sample.get + val (prefix, suffix) = subject.splitAt(n) + prefix == subject.take(n) && suffix == subject.drop(n) + } + + property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { + val element = oneOf(subject.toSeq).sample.get + val removed = subject - element + subject.contains(element) && !removed.contains(element) && subject.size - 1 == removed.size + }} + + property("remove all") = forAll { (subject: TreeSet[Int]) => + val result = subject.foldLeft(subject)((acc, elt) => acc - elt) + result.isEmpty + } +} -- cgit v1.2.3 From c51bdeaa6b85e132f24480fa93ded440c3511ab3 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Wed, 28 Dec 2011 10:50:25 +0100 Subject: Minimize number of calls to ordering. --- .../scala/collection/immutable/RedBlack.scala | 27 +++++++++++----------- 1 file changed, 14 insertions(+), 13 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 3922aded5e..e47cc3bedd 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -52,10 +52,12 @@ object RedBlack { def value: B def left: Tree[A, B] def right: Tree[A, B] - def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = - if (ordering.lt(k, key)) left.lookup(k) - else if (ordering.lt(key, k)) right.lookup(k) + def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + val cmp = ordering.compare(k, key) + if (cmp < 0) left.lookup(k) + else if (cmp > 0) right.lookup(k) else this + } private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = l match { case RedTree(y, yv, RedTree(x, xv, a, b), c) => RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) @@ -73,8 +75,9 @@ object RedBlack { mkTree(isBlack, x, xv, a, r) } def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = { - if (ordering.lt(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) - else if (ordering.lt(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) + val cmp = ordering.compare(k, key) + if (cmp < 0) balanceLeft(isBlack, key, value, left.upd(k, v), right) + else if (cmp > 0) balanceRight(isBlack, key, value, left, right.upd(k, v)) else mkTree(isBlack, k, v, left, right) } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees @@ -140,12 +143,11 @@ object RedBlack { case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c) case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c)) } - // RedBlack is neither A : Ordering[A], nor A <% Ordered[A] - k match { - case _ if ordering.lt(k, key) => delLeft - case _ if ordering.lt(key, k) => delRight - case _ => append(left, right) - } + + val cmp = ordering.compare(k, key) + if (cmp < 0) delLeft + else if (cmp > 0) delRight + else append(left, right) } def smallest: NonEmpty[A, B] = if (left eq Empty.Instance) this else left.smallest @@ -169,8 +171,7 @@ object RedBlack { override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { if (from == None && until == None) return this if (from != None && ordering.lt(key, from.get)) return right.rng(from, until); - if (until != None && (ordering.lt(until.get,key) || 
!ordering.lt(key,until.get))) - return left.rng(from, until); + if (until != None && ordering.lteq(until.get, key)) return left.rng(from, until); val newLeft = left.rng(from, None) val newRight = right.rng(None, until) if ((newLeft eq left) && (newRight eq right)) this -- cgit v1.2.3 From 6d8dca7a00eef3ce156abcf2e41a5fd5867688b8 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 2 Jan 2012 15:55:47 +0100 Subject: Moved key/value/left/right fields up to NonEmpty class. Don't rely on pattern matching for updating the tree. --- .../scala/collection/immutable/RedBlack.scala | 86 ++++++++++++++-------- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 2 +- 3 files changed, 57 insertions(+), 33 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index e47cc3bedd..949ab557ba 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -18,14 +18,24 @@ package immutable private[immutable] object RedBlack { - private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t match { - case RedTree(k, v, l, r) => BlackTree(k, v, l, r) - case t => t - } + private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t.black + private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - + def isRed[A, B](tree: Tree[A, B]) = !tree.isBlack + + @annotation.tailrec + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq Empty.Instance) tree else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree + } sealed abstract class Tree[A, +B] extends Serializable { + def key: A + def value: B + def left: Tree[A, B] + def right: Tree[A, B] def isEmpty: Boolean def isBlack: Boolean def lookup(x: A)(implicit ordering: Ordering[A]): Tree[A, B] @@ -45,33 +55,31 @@ object RedBlack { def last : A def count : Int protected[immutable] def nth(n: Int): NonEmpty[A, B] + def black: Tree[A, B] = this + def red: Tree[A, B] } - sealed abstract class NonEmpty[A, +B] extends Tree[A, B] with Serializable { + sealed abstract class NonEmpty[A, +B](final val key: A, final val value: B, final val left: Tree[A, B], final val right: Tree[A, B]) extends Tree[A, B] with Serializable { def isEmpty = false - def key: A - def value: B - def left: Tree[A, B] - def right: Tree[A, B] def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = { val cmp = ordering.compare(k, key) if (cmp < 0) left.lookup(k) else if (cmp > 0) right.lookup(k) else this } - private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = l match { - case RedTree(y, yv, RedTree(x, xv, a, b), c) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case RedTree(x, xv, a, RedTree(y, yv, b, c)) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case _ => + private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { + if (isRed(l) && isRed(l.left)) + RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) + else if (isRed(l) && isRed(l.right)) + RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, 
l.right.right, d)) + else mkTree(isBlack, z, zv, l, d) } - private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1])/*: NonEmpty[A, B1]*/ = r match { - case RedTree(z, zv, RedTree(y, yv, b, c), d) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case RedTree(y, yv, b, RedTree(z, zv, c, d)) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case _ => + private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { + if (isRed(r) && isRed(r.left)) + RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) + else if (isRed(r) && isRed(r.right)) + RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) + else mkTree(isBlack, x, xv, a, r) } def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = { @@ -272,9 +280,13 @@ object RedBlack { object Empty { def empty[A]: Tree[A, Nothing] = Instance.asInstanceOf[Tree[A, Nothing]] - val Instance: Tree[_ >: Nothing, Nothing] = Empty[Nothing]() + final val Instance: Tree[_ >: Nothing, Nothing] = Empty[Nothing]() } final case class Empty[A] private () extends Tree[A, Nothing] { + def key = throw new NoSuchElementException("empty map") + def value = throw new NoSuchElementException("empty map") + def left = this + def right = this def isEmpty = true def isBlack = true def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this @@ -293,22 +305,34 @@ object RedBlack { def last = throw new NoSuchElementException("empty map") def count = 0 protected[immutable] def nth(n: Int) = throw new NoSuchElementException("empty map") + override def red = sys.error("cannot make leaf red") override def toString() = "Empty" private def readResolve() = Empty.empty } - final case class RedTree[A, +B](override val key: A, - override val value: B, - override val left: Tree[A, B], - override val right: Tree[A, B]) extends NonEmpty[A, B] { + final class RedTree[A, +B](key: A, + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends NonEmpty[A, B](key, value, left, right) { def isBlack = false + override def black = BlackTree(key, value, left, right) + override def red = this + } + object RedTree { + def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) + def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } - final case class BlackTree[A, +B](override val key: A, - override val value: B, - override val left: Tree[A, B], - override val right: Tree[A, B]) extends NonEmpty[A, B] { + final class BlackTree[A, +B](key: A, + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends NonEmpty[A, B](key, value, left, right) { def isBlack = true + override def red = RedTree(key, value, left, right) + } + object BlackTree { + def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) + def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } private[this] class TreeIterator[A, B](tree: NonEmpty[A, B]) extends Iterator[(A, B)] { diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index bb54688e72..48a0bc3d44 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -189,7 +189,7 @@ class 
TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: * @param key the key of the mapping of interest * @return the value of the mapping, if it exists */ - override def get(key: A): Option[B] = tree.lookup(key) match { + override def get(key: A): Option[B] = lookup(tree, key) match { case n: NonEmpty[_, _] => Some(n.value) case _ => None } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index b9b5e12b1e..74c63d0eb5 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -142,7 +142,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * @param elem the element to check for membership. * @return true, iff `elem` is contained in this set. */ - def contains(elem: A): Boolean = !tree.lookup(elem).isEmpty + def contains(elem: A): Boolean = !lookup(tree, elem).isEmpty /** Creates a new iterator over all elements contained in this * object. -- cgit v1.2.3 From 82374ad4c2c518ef8ee3fe3d2ef3e72cce75d4f1 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 2 Jan 2012 16:35:58 +0100 Subject: Implemented deletes without pattern matching. --- .../scala/collection/immutable/RedBlack.scala | 133 +++++++++++---------- 1 file changed, 72 insertions(+), 61 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 949ab557ba..57b08c2b8c 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -22,7 +22,8 @@ object RedBlack { private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - def isRed[A, B](tree: Tree[A, B]) = !tree.isBlack + def isRedTree[A, B](tree: Tree[A, B]) = tree.isInstanceOf[RedTree[_, _]] + def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]] @annotation.tailrec def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq Empty.Instance) tree else { @@ -67,17 +68,17 @@ object RedBlack { else this } private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { - if (isRed(l) && isRed(l.left)) + if (isRedTree(l) && isRedTree(l.left)) RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) - else if (isRed(l) && isRed(l.right)) + else if (isRedTree(l) && isRedTree(l.right)) RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) else mkTree(isBlack, z, zv, l, d) } private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { - if (isRed(r) && isRed(r.left)) + if (isRedTree(r) && isRedTree(r.left)) RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) - else if (isRed(r) && isRed(r.right)) + else if (isRedTree(r) && isRedTree(r.right)) RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) else mkTree(isBlack, x, xv, a, r) @@ -91,65 +92,75 @@ object RedBlack { // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html def del(k: A)(implicit ordering: 
Ordering[A]): Tree[A, B] = { - def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { - case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) => - RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d)) - case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) => - RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d)) - case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) => - RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d)) - case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) => - RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) - case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) => - RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) - case (a, b) => - BlackTree(x, xv, a, b) - } - def subl(t: Tree[A, B]) = t match { - case BlackTree(x, xv, a, b) => RedTree(x, xv, a, b) - case _ => sys.error("Defect: invariance violation; expected black, got "+t) - } - def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { - case (RedTree(y, yv, a, b), c) => - RedTree(x, xv, BlackTree(y, yv, a, b), c) - case (bl, BlackTree(y, yv, a, b)) => - balance(x, xv, bl, RedTree(y, yv, a, b)) - case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) => - RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c))) - case _ => sys.error("Defect: invariance violation at "+right) - } - def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = (tl, tr) match { - case (a, RedTree(y, yv, b, c)) => - RedTree(x, xv, a, BlackTree(y, yv, b, c)) - case (BlackTree(y, yv, a, b), bl) => - balance(x, xv, RedTree(y, yv, a, b), bl) - case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) => - RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl)) - case _ => sys.error("Defect: invariance violation at "+left) + def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + if (isRedTree(tr)) { + RedTree(x, xv, tl.black, tr.black) + } else if (isRedTree(tl.left)) { + RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) + } else if (isRedTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + BlackTree(x, xv, tl, tr) + } + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) { + RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) + } else if (isRedTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) + } else { + BlackTree(x, xv, tl, tr) + } + } else { + BlackTree(x, xv, tl, tr) } - def delLeft = left match { - case _: BlackTree[_, _] => balLeft(key, value, left.del(k), right) - case _ => RedTree(key, value, left.del(k), right) + def subl(t: Tree[A, B]) = + if (t.isInstanceOf[BlackTree[_, _]]) t.red + else sys.error("Defect: invariance violation; expected black, got "+t) + + def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + RedTree(x, xv, tl.black, tr) + } else if (isBlackTree(tr)) { + balance(x, xv, tl, tr.red) + } else if (isRedTree(tr) && isBlackTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) + } else { + sys.error("Defect: invariance violation at "+right) } - def delRight = right match { - case _: BlackTree[_, _] => balRight(key, value, left, right.del(k)) - case _ => RedTree(key, value, left, right.del(k)) + def balRight(x: A, xv: B, tl: Tree[A, 
B], tr: Tree[A, B]) = if (isRedTree(tr)) { + RedTree(x, xv, tl, tr.black) + } else if (isBlackTree(tl)) { + balance(x, xv, tl.red, tr) + } else if (isRedTree(tl) && isBlackTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + sys.error("Defect: invariance violation at "+left) } - def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = (tl, tr) match { - case (Empty.Instance, t) => t - case (t, Empty.Instance) => t - case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => - append(b, c) match { - case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d)) - case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d)) - } - case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) => - append(b, c) match { - case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d)) - case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d)) - } - case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c) - case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c)) + def delLeft = if (isBlackTree(left)) balLeft(key, value, left.del(k), right) else RedTree(key, value, left.del(k), right) + def delRight = if (isBlackTree(right)) balRight(key, value, left, right.del(k)) else RedTree(key, value, left, right.del(k)) + def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq Empty.Instance) { + tr + } else if (tr eq Empty.Instance) { + tl + } else if (isRedTree(tl) && isRedTree(tr)) { + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) { + RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) + } else { + RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) + } + } else if (isBlackTree(tl) && isBlackTree(tr)) { + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) { + RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right)) + } else { + balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) + } + } else if (isRedTree(tr)) { + RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) + } else if (isRedTree(tl)) { + RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) + } else { + sys.error("unmatched tree on append: " + tl + ", " + tr) } val cmp = ordering.compare(k, key) -- cgit v1.2.3 From 3dea25186670096b25150baba981eb36ef244a5f Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 2 Jan 2012 16:49:29 +0100 Subject: Implemented range without using pattern matching. 
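As in the previous commits, the algorithm is unchanged; only the way unzipBoth inspects the two trees moves from a match on (left, right) to isRedTree/isBlackTree tests plus field reads off a cast to NonEmpty. The tuple form builds a Tuple2 scrutinee on every call just to take it apart again, and with the hand-written unapply methods introduced two commits earlier each successful pattern also allocates a Some-wrapped tuple. A toy before/after of the same rewrite, with made-up names (Node, Red, Black and blackRoots are illustrative only):

    sealed trait Node
    case object Leaf extends Node
    final case class Red(left: Node, right: Node) extends Node
    final case class Black(left: Node, right: Node) extends Node

    def isRed(t: Node)   = t.isInstanceOf[Red]
    def isBlack(t: Node) = t.isInstanceOf[Black]

    // tuple-match form: allocates the (l, r) scrutinee on every call
    def blackRootsMatch(l: Node, r: Node): Int = (l, r) match {
      case (Black(_, _), Black(_, _))          => 2
      case (Black(_, _), _) | (_, Black(_, _)) => 1
      case _                                   => 0
    }

    // predicate form, in the style of the patched unzipBoth
    def blackRootsPred(l: Node, r: Node): Int =
      if (isBlack(l) && isBlack(r)) 2
      else if (isBlack(l) || isBlack(r)) 1
      else 0

The trade-off is that the compiler no longer checks exhaustiveness, which is why the rewritten unzipBoth ends in an explicit sys.error guard for tree shapes that should never occur.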
--- .../scala/collection/immutable/RedBlack.scala | 23 +++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 57b08c2b8c..3b16f719bf 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -224,23 +224,28 @@ object RedBlack { right: Tree[A, B], leftZipper: List[NonEmpty[A, B]], rightZipper: List[NonEmpty[A, B]], - smallerDepth: Int): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = (left, right) match { - case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) => + smallerDepth: Int): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = { + lazy val l = left.asInstanceOf[NonEmpty[A, B]] + lazy val r = right.asInstanceOf[NonEmpty[A, B]] + if (isBlackTree(left) && isBlackTree(right)) { unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1) - case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) => + } else if (isRedTree(left) && isRedTree(right)) { unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth) - case (_, r @ RedTree(_, _, _, _)) => + } else if (isRedTree(right)) { unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) - case (l @ RedTree(_, _, _, _), _) => + } else if (isRedTree(left)) { unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) - case (Empty.Instance, Empty.Instance) => + } else if ((left eq Empty.Instance) && (right eq Empty.Instance)) { (Nil, true, false, smallerDepth) - case (Empty.Instance, r @ BlackTree(_, _, _, _)) => + } else if ((left eq Empty.Instance) && isBlackTree(right)) { val leftMost = true (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) - case (l @ BlackTree(_, _, _, _), Empty.Instance) => + } else if (isBlackTree(left) && (right eq Empty.Instance)) { val leftMost = false (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) + } else { + sys.error("unmatched trees in unzip: " + left + ", " + right) + } } unzipBoth(left, right, Nil, Nil, 0) } @@ -248,7 +253,7 @@ object RedBlack { private[this] def rebalance(newLeft: Tree[A, B], newRight: Tree[A, B]) = { // This is like drop(n-1), but only counting black nodes def findDepth(zipper: List[NonEmpty[A, B]], depth: Int): List[NonEmpty[A, B]] = zipper match { - case BlackTree(_, _, _, _) :: tail => + case head :: tail if isBlackTree(head) => if (depth == 1) zipper else findDepth(tail, depth - 1) case _ :: tail => findDepth(tail, depth) case Nil => sys.error("Defect: unexpected empty zipper while computing range") -- cgit v1.2.3 From 5c05f66b619ea9c8a543518fd9d7d601268c6f19 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Mon, 2 Jan 2012 19:48:37 +0100 Subject: Use null to represent empty trees. Removed Empty/NonEmpty classes. 
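This is the largest step: the Empty sentinel disappears entirely, and an absent child is simply a null reference that never leaks out of the RedBlack object. The shape of the code after the change, reduced to a minimal sketch (Node, lookup and get below are illustrative stand-ins, not the actual classes):

    // assumes an Ordering is in scope for the key type
    final class Node[A, B](val key: A, val value: B,
                           val left: Node[A, B], val right: Node[A, B])

    @annotation.tailrec
    def lookup[A, B](t: Node[A, B], k: A)(implicit ord: Ordering[A]): Node[A, B] =
      if (t eq null) null
      else {
        val cmp = ord.compare(k, t.key)
        if (cmp < 0) lookup(t.left, k)
        else if (cmp > 0) lookup(t.right, k)
        else t
      }

    // only the boundary translates the internal null back into an Option
    def get[A, B](t: Node[A, B], k: A)(implicit ord: Ordering[A]): Option[B] =
      lookup(t, k) match {
        case null => None
        case n    => Some(n.value)
      }

Keeping every null test as an explicit eq null (or the isEmpty helper) inside this one file is what makes the representation tolerable: TreeMap and TreeSet still hand out Options and empty collections, never the null itself.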
--- .../scala/collection/immutable/RedBlack.scala | 569 ++++++++++----------- .../scala/collection/immutable/TreeMap.scala | 46 +- .../scala/collection/immutable/TreeSet.scala | 44 +- test/files/scalacheck/redblack.scala | 112 ++-- 4 files changed, 367 insertions(+), 404 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 3b16f719bf..2537d043fd 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -11,6 +11,8 @@ package scala.collection package immutable +import annotation.meta.getter + /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. * * @since 2.3 @@ -18,389 +20,354 @@ package immutable private[immutable] object RedBlack { - private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = t.black + private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - def isRedTree[A, B](tree: Tree[A, B]) = tree.isInstanceOf[RedTree[_, _]] + + def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree) + def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]] def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]] + def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + + def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null + def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { + case null => None + case tree => Some(tree.value) + } + @annotation.tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq Empty.Instance) tree else { - val cmp = ordering.compare(x, tree.key) - if (cmp < 0) lookup(tree.left, x) - else if (cmp > 0) lookup(tree.right, x) - else tree + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree } - sealed abstract class Tree[A, +B] extends Serializable { - def key: A - def value: B - def left: Tree[A, B] - def right: Tree[A, B] - def isEmpty: Boolean - def isBlack: Boolean - def lookup(x: A)(implicit ordering: Ordering[A]): Tree[A, B] - def update[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(k, v)) - def delete(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(k)) - def range(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(from, until)) - def foreach[U](f: ((A, B)) => U) - def foreachKey[U](f: A => U) - def iterator: Iterator[(A, B)] - def keyIterator: Iterator[A] - def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] - def del(k: A)(implicit ordering: Ordering[A]): Tree[A, B] - def smallest: NonEmpty[A, B] - def greatest: NonEmpty[A, B] - def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] - def first : A - def last : A - def count : Int - protected[immutable] def nth(n: Int): NonEmpty[A, B] - def black: Tree[A, B] = this - def red: Tree[A, B] + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: 
B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) + def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) + def range[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty map") + var result = tree + while (result.left ne null) result = result.left + result } - sealed abstract class NonEmpty[A, +B](final val key: A, final val value: B, final val left: Tree[A, B], final val right: Tree[A, B]) extends Tree[A, B] with Serializable { - def isEmpty = false - def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - val cmp = ordering.compare(k, key) - if (cmp < 0) left.lookup(k) - else if (cmp > 0) right.lookup(k) - else this - } - private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { - if (isRedTree(l) && isRedTree(l.left)) - RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) - else if (isRedTree(l) && isRedTree(l.right)) - RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) - else - mkTree(isBlack, z, zv, l, d) - } - private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1])/*: NonEmpty[A, B1]*/ = { - if (isRedTree(r) && isRedTree(r.left)) - RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) - else if (isRedTree(r) && isRedTree(r.right)) - RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) - else - mkTree(isBlack, x, xv, a, r) - } - def upd[B1 >: B](k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = { - val cmp = ordering.compare(k, key) - if (cmp < 0) balanceLeft(isBlack, key, value, left.upd(k, v), right) - else if (cmp > 0) balanceRight(isBlack, key, value, left, right.upd(k, v)) - else mkTree(isBlack, k, v, left, right) - } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty map") + var result = tree + while (result.right ne null) result = result.right + result + } + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { + foreach(tree.left, f) + f((tree.key, tree.value)) + foreach(tree.right, f) + } + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) { + foreachKey(tree.left, f) + f(tree.key) + foreachKey(tree.right, f) + } + + def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = if (tree eq null) Iterator.empty else new TreeIterator(tree) + def keyIterator[A, _](tree: Tree[A, _]): Iterator[A] = if (tree eq null) Iterator.empty else new TreeKeyIterator(tree) + + private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { + if (isRedTree(l) && isRedTree(l.left)) + RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) + else if (isRedTree(l) && isRedTree(l.right)) + RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) + else + mkTree(isBlack, z, zv, l, d) + } + private[this] def balanceRight[A, B, 
B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): Tree[A, B1] = { + if (isRedTree(r) && isRedTree(r.left)) + RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) + else if (isRedTree(r) && isRedTree(r.right)) + RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) + else + mkTree(isBlack, x, xv, a, r) + } + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree == null) { + RedTree(k, v, null, null) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) balanceLeft(tree.isBlack, tree.key, tree.value, upd(tree.left, k, v), tree.right) + else if (cmp > 0) balanceRight(tree.isBlack, tree.key, tree.value, tree.left, upd(tree.right, k, v)) + else mkTree(tree.isBlack, k, v, tree.left, tree.right) + } + // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - def del(k: A)(implicit ordering: Ordering[A]): Tree[A, B] = { - def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - if (isRedTree(tr)) { - RedTree(x, xv, tl.black, tr.black) - } else if (isRedTree(tl.left)) { - RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) - } else if (isRedTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) - } else { - BlackTree(x, xv, tl, tr) - } - } else if (isRedTree(tr)) { - if (isRedTree(tr.right)) { - RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) - } else if (isRedTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) - } else { - BlackTree(x, xv, tl, tr) - } + private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree == null) null else { + def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + if (isRedTree(tr)) { + RedTree(x, xv, tl.black, tr.black) + } else if (isRedTree(tl.left)) { + RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) + } else if (isRedTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) } else { BlackTree(x, xv, tl, tr) } - def subl(t: Tree[A, B]) = - if (t.isInstanceOf[BlackTree[_, _]]) t.red - else sys.error("Defect: invariance violation; expected black, got "+t) - - def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - RedTree(x, xv, tl.black, tr) - } else if (isBlackTree(tr)) { - balance(x, xv, tl, tr.red) - } else if (isRedTree(tr) && isBlackTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) { + RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) + } else if (isRedTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) } else { - sys.error("Defect: invariance violation at "+right) + BlackTree(x, xv, tl, tr) } - def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { - RedTree(x, xv, tl, tr.black) - } else if 
(isBlackTree(tl)) { - balance(x, xv, tl.red, tr) - } else if (isRedTree(tl) && isBlackTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + BlackTree(x, xv, tl, tr) + } + def subl(t: Tree[A, B]) = + if (t.isInstanceOf[BlackTree[_, _]]) t.red + else sys.error("Defect: invariance violation; expected black, got "+t) + + def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + RedTree(x, xv, tl.black, tr) + } else if (isBlackTree(tr)) { + balance(x, xv, tl, tr.red) + } else if (isRedTree(tr) && isBlackTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) + } else { + sys.error("Defect: invariance violation at ") // TODO + } + def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { + RedTree(x, xv, tl, tr.black) + } else if (isBlackTree(tl)) { + balance(x, xv, tl.red, tr) + } else if (isRedTree(tl) && isBlackTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + sys.error("Defect: invariance violation at ") // TODO + } + def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right) + def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k)) + def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) { + tr + } else if (tr eq null) { + tl + } else if (isRedTree(tl) && isRedTree(tr)) { + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) { + RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) } else { - sys.error("Defect: invariance violation at "+left) + RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) } - def delLeft = if (isBlackTree(left)) balLeft(key, value, left.del(k), right) else RedTree(key, value, left.del(k), right) - def delRight = if (isBlackTree(right)) balRight(key, value, left, right.del(k)) else RedTree(key, value, left, right.del(k)) - def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq Empty.Instance) { - tr - } else if (tr eq Empty.Instance) { - tl - } else if (isRedTree(tl) && isRedTree(tr)) { - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) - } else { - RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) - } - } else if (isBlackTree(tl) && isBlackTree(tr)) { - val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right)) - } else { - balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) - } - } else if (isRedTree(tr)) { - RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) - } else if (isRedTree(tl)) { - RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) + } else if (isBlackTree(tl) && isBlackTree(tr)) { + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) { + RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, 
bc.right, tr.right)) } else { - sys.error("unmatched tree on append: " + tl + ", " + tr) + balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) } - - val cmp = ordering.compare(k, key) - if (cmp < 0) delLeft - else if (cmp > 0) delRight - else append(left, right) + } else if (isRedTree(tr)) { + RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) + } else if (isRedTree(tl)) { + RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) + } else { + sys.error("unmatched tree on append: " + tl + ", " + tr) } - def smallest: NonEmpty[A, B] = if (left eq Empty.Instance) this else left.smallest - def greatest: NonEmpty[A, B] = if (right eq Empty.Instance) this else right.greatest + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) delLeft + else if (cmp > 0) delRight + else append(tree.left, tree.right) + } - def iterator: Iterator[(A, B)] = new TreeIterator(this) - def keyIterator: Iterator[A] = new TreeKeyIterator(this) + private[this] def rng[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (from == None && until == None) return tree + if (from != None && ordering.lt(tree.key, from.get)) return rng(tree.right, from, until); + if (until != None && ordering.lteq(until.get, tree.key)) return rng(tree.left, from, until); + val newLeft = rng(tree.left, from, None) + val newRight = rng(tree.right, None, until) + if ((newLeft eq tree.left) && (newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value); + else if (newRight eq null) upd(newLeft, tree.key, tree.value); + else rebalance(tree, newLeft, newRight) + } - override def foreach[U](f: ((A, B)) => U) { - if (left ne Empty.Instance) left foreach f - f((key, value)) - if (right ne Empty.Instance) right foreach f + // The zipper returned might have been traversed left-most (always the left child) + // or right-most (always the right child). Left trees are traversed right-most, + // and right trees are traversed leftmost. + + // Returns the zipper for the side with deepest black nodes depth, a flag + // indicating whether the trees were unbalanced at all, and a flag indicating + // whether the zipper was traversed left-most or right-most. 
+ + // If the trees were balanced, returns an empty zipper + private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = { + // Once a side is found to be deeper, unzip it to the bottom + def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = { + val next = if (leftMost) zipper.head.left else zipper.head.right + next match { + case null => zipper + case node => unzip(node :: zipper, leftMost) + } } - override def foreachKey[U](f: A => U) { - if (left ne Empty.Instance) left foreachKey f - f(key) - if (right ne Empty.Instance) right foreachKey f + // Unzip left tree on the rightmost side and right tree on the leftmost side until one is + // found to be deeper, or the bottom is reached + def unzipBoth(left: Tree[A, B], + right: Tree[A, B], + leftZipper: List[Tree[A, B]], + rightZipper: List[Tree[A, B]], + smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = { + if (isBlackTree(left) && isBlackTree(right)) { + unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1) + } else if (isRedTree(left) && isRedTree(right)) { + unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth) + } else if (isRedTree(right)) { + unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth) + } else if (isRedTree(left)) { + unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth) + } else if ((left eq null) && (right eq null)) { + (Nil, true, false, smallerDepth) + } else if ((left eq null) && isBlackTree(right)) { + val leftMost = true + (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth) + } else if (isBlackTree(left) && (right eq null)) { + val leftMost = false + (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth) + } else { + sys.error("unmatched trees in unzip: " + left + ", " + right) + } } - - override def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { - if (from == None && until == None) return this - if (from != None && ordering.lt(key, from.get)) return right.rng(from, until); - if (until != None && ordering.lteq(until.get, key)) return left.rng(from, until); - val newLeft = left.rng(from, None) - val newRight = right.rng(None, until) - if ((newLeft eq left) && (newRight eq right)) this - else if (newLeft eq Empty.Instance) newRight.upd(key, value); - else if (newRight eq Empty.Instance) newLeft.upd(key, value); - else rebalance(newLeft, newRight) + unzipBoth(left, right, Nil, Nil, 0) + } + private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = { + // This is like drop(n-1), but only counting black nodes + def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match { + case head :: tail if isBlackTree(head) => + if (depth == 1) zipper else findDepth(tail, depth - 1) + case _ :: tail => findDepth(tail, depth) + case Nil => sys.error("Defect: unexpected empty zipper while computing range") } - // The zipper returned might have been traversed left-most (always the left child) - // or right-most (always the right child). Left trees are traversed right-most, - // and right trees are traversed leftmost. - - // Returns the zipper for the side with deepest black nodes depth, a flag - // indicating whether the trees were unbalanced at all, and a flag indicating - // whether the zipper was traversed left-most or right-most. 
- - // If the trees were balanced, returns an empty zipper - private[this] def compareDepth(left: Tree[A, B], right: Tree[A, B]): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = { - // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: List[NonEmpty[A, B]], leftMost: Boolean): List[NonEmpty[A, B]] = { - val next = if (leftMost) zipper.head.left else zipper.head.right - next match { - case node: NonEmpty[_, _] => unzip(node :: zipper, leftMost) - case _ => zipper - } + // Blackening the smaller tree avoids balancing problems on union; + // this can't be done later, though, or it would change the result of compareDepth + val blkNewLeft = blacken(newLeft) + val blkNewRight = blacken(newRight) + val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) + + if (levelled) { + BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight) + } else { + val zipFrom = findDepth(zipper, smallerDepth) + val union = if (leftMost) { + RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head) + } else { + RedTree(tree.key, tree.value, zipFrom.head, blkNewRight) } - - // Unzip left tree on the rightmost side and right tree on the leftmost side until one is - // found to be deeper, or the bottom is reached - def unzipBoth(left: Tree[A, B], - right: Tree[A, B], - leftZipper: List[NonEmpty[A, B]], - rightZipper: List[NonEmpty[A, B]], - smallerDepth: Int): (List[NonEmpty[A, B]], Boolean, Boolean, Int) = { - lazy val l = left.asInstanceOf[NonEmpty[A, B]] - lazy val r = right.asInstanceOf[NonEmpty[A, B]] - if (isBlackTree(left) && isBlackTree(right)) { - unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1) - } else if (isRedTree(left) && isRedTree(right)) { - unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth) - } else if (isRedTree(right)) { - unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) - } else if (isRedTree(left)) { - unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) - } else if ((left eq Empty.Instance) && (right eq Empty.Instance)) { - (Nil, true, false, smallerDepth) - } else if ((left eq Empty.Instance) && isBlackTree(right)) { - val leftMost = true - (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) - } else if (isBlackTree(left) && (right eq Empty.Instance)) { - val leftMost = false - (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) - } else { - sys.error("unmatched trees in unzip: " + left + ", " + right) - } + val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) => + if (leftMost) + balanceLeft(node.isBlack, node.key, node.value, tree, node.right) + else + balanceRight(node.isBlack, node.key, node.value, node.left, tree) } - unzipBoth(left, right, Nil, Nil, 0) + zippedTree } + } - private[this] def rebalance(newLeft: Tree[A, B], newRight: Tree[A, B]) = { - // This is like drop(n-1), but only counting black nodes - def findDepth(zipper: List[NonEmpty[A, B]], depth: Int): List[NonEmpty[A, B]] = zipper match { - case head :: tail if isBlackTree(head) => - if (depth == 1) zipper else findDepth(tail, depth - 1) - case _ :: tail => findDepth(tail, depth) - case Nil => sys.error("Defect: unexpected empty zipper while computing range") - } - - // Blackening the smaller tree avoids balancing problems on union; - // this can't be done later, though, or it would change the result of compareDepth - val blkNewLeft = blacken(newLeft) - val blkNewRight = blacken(newRight) - val (zipper, levelled, leftMost, smallerDepth) = 
compareDepth(blkNewLeft, blkNewRight) - - if (levelled) { - BlackTree(key, value, blkNewLeft, blkNewRight) - } else { - val zipFrom = findDepth(zipper, smallerDepth) - val union = if (leftMost) { - RedTree(key, value, blkNewLeft, zipFrom.head) - } else { - RedTree(key, value, zipFrom.head, blkNewRight) - } - val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) => - if (leftMost) - balanceLeft(node.isBlack, node.key, node.value, tree, node.right) - else - balanceRight(node.isBlack, node.key, node.value, node.left, tree) - } - zippedTree - } - } - def first = if (left eq Empty.Instance) key else left.first - def last = if (right eq Empty.Instance) key else right.last - val count = 1 + left.count + right.count - protected[immutable] def nth(n: Int) = { - val count = left.count + sealed abstract class Tree[A, +B]( + @(inline @getter) final val key: A, + @(inline @getter) final val value: B, + @(inline @getter) final val left: Tree[A, B], + @(inline @getter) final val right: Tree[A, B]) + extends Serializable { + @(inline @getter) final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) + def isBlack: Boolean + def nth(n: Int): Tree[A, B] = { + val count = RedBlack.count(left) if (n < count) left.nth(n) else if (n > count) right.nth(n - count - 1) else this } + def black: Tree[A, B] + def red: Tree[A, B] } - object Empty { - def empty[A]: Tree[A, Nothing] = Instance.asInstanceOf[Tree[A, Nothing]] - final val Instance: Tree[_ >: Nothing, Nothing] = Empty[Nothing]() - } - final case class Empty[A] private () extends Tree[A, Nothing] { - def key = throw new NoSuchElementException("empty map") - def value = throw new NoSuchElementException("empty map") - def left = this - def right = this - def isEmpty = true - def isBlack = true - def lookup(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this - def upd[B](k: A, v: B)(implicit ordering: Ordering[A]): Tree[A, B] = RedTree(k, v, this, this) - def del(k: A)(implicit ordering: Ordering[A]): Tree[A, Nothing] = this - def smallest: NonEmpty[A, Nothing] = throw new NoSuchElementException("empty map") - def greatest: NonEmpty[A, Nothing] = throw new NoSuchElementException("empty map") - def iterator: Iterator[(A, Nothing)] = Iterator.empty - def keyIterator: Iterator[A] = Iterator.empty - - override def foreach[U](f: ((A, Nothing)) => U) {} - override def foreachKey[U](f: A => U) {} - - def rng(from: Option[A], until: Option[A])(implicit ordering: Ordering[A]) = this - def first = throw new NoSuchElementException("empty map") - def last = throw new NoSuchElementException("empty map") - def count = 0 - protected[immutable] def nth(n: Int) = throw new NoSuchElementException("empty map") - override def red = sys.error("cannot make leaf red") - - override def toString() = "Empty" - - private def readResolve() = Empty.empty - } final class RedTree[A, +B](key: A, - value: B, - left: Tree[A, B], - right: Tree[A, B]) extends NonEmpty[A, B](key, value, left, right) { - def isBlack = false + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + override def isBlack = false override def black = BlackTree(key, value, left, right) override def red = this + override def toString = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" } object RedTree { def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } final class BlackTree[A, 
+B](key: A, - value: B, - left: Tree[A, B], - right: Tree[A, B]) extends NonEmpty[A, B](key, value, left, right) { - def isBlack = true + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + override def isBlack = true + override def black = this override def red = RedTree(key, value, left, right) + override def toString = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" } object BlackTree { def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } - private[this] class TreeIterator[A, B](tree: NonEmpty[A, B]) extends Iterator[(A, B)] { - override def hasNext: Boolean = next ne Empty.Instance + private[this] class TreeIterator[A, B](tree: Tree[A, B]) extends Iterator[(A, B)] { + override def hasNext: Boolean = next ne null override def next: (A, B) = next match { - case Empty.Instance => + case null => throw new NoSuchElementException("next on empty iterator") - case tree: NonEmpty[A, B] => + case tree => addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) Empty.empty else path.pop() + next = if (path.isEmpty) null else path.pop() (tree.key, tree.value) } @annotation.tailrec private[this] def addLeftMostBranchToPath(tree: Tree[A, B]) { - tree match { - case Empty.Instance => - case tree: NonEmpty[A, B] => - path.push(tree) - addLeftMostBranchToPath(tree.left) + if (tree ne null) { + path.push(tree) + addLeftMostBranchToPath(tree.left) } } - private[this] val path = mutable.ArrayStack.empty[NonEmpty[A, B]] + private[this] val path = mutable.ArrayStack.empty[Tree[A, B]] addLeftMostBranchToPath(tree) private[this] var next: Tree[A, B] = path.pop() } - private[this] class TreeKeyIterator[A](tree: NonEmpty[A, _]) extends Iterator[A] { - override def hasNext: Boolean = next ne Empty.Instance + private[this] class TreeKeyIterator[A](tree: Tree[A, _]) extends Iterator[A] { + override def hasNext: Boolean = next ne null override def next: A = next match { - case Empty.Instance => + case null => throw new NoSuchElementException("next on empty iterator") - case tree: NonEmpty[A, _] => + case tree => addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) Empty.empty else path.pop() + next = if (path.isEmpty) null else path.pop() tree.key } @annotation.tailrec private[this] def addLeftMostBranchToPath(tree: Tree[A, _]) { - tree match { - case Empty.Instance => - case tree: NonEmpty[A, _] => - path.push(tree) - addLeftMostBranchToPath(tree.left) + if (tree ne null) { + path.push(tree) + addLeftMostBranchToPath(tree.left) } } - private[this] val path = mutable.ArrayStack.empty[NonEmpty[A, _]] + private[this] val path = mutable.ArrayStack.empty[Tree[A, _]] addLeftMostBranchToPath(tree) private[this] var next: Tree[A, _] = path.pop() } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 48a0bc3d44..45e936444f 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -51,39 +51,39 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: with MapLike[A, B, TreeMap[A, B]] with Serializable { - import RedBlack._ + import immutable.{RedBlack => RB} def isSmaller(x: A, y: A) = ordering.lt(x, y) override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = TreeMap.newBuilder[A, B] - override def size = tree.count + 
override def size = RB.count(tree) - def this()(implicit ordering: Ordering[A]) = this(RedBlack.Empty.empty)(ordering) + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { - val ntree = tree.range(from,until) + val ntree = RB.range(tree, from,until) new TreeMap[A,B](ntree) } - override def firstKey = tree.first - override def lastKey = tree.last + override def firstKey = RB.smallest(tree).key + override def lastKey = RB.greatest(tree).key override def compare(k0: A, k1: A): Int = ordering.compare(k0, k1) override def head = { - val smallest = tree.smallest + val smallest = RB.smallest(tree) (smallest.key, smallest.value) } - override def headOption = if (tree.isEmpty) None else Some(head) + override def headOption = if (RB.isEmpty(tree)) None else Some(head) override def last = { - val greatest = tree.greatest + val greatest = RB.greatest(tree) (greatest.key, greatest.value) } - override def lastOption = if (tree.isEmpty) None else Some(last) + override def lastOption = if (RB.isEmpty(tree)) None else Some(last) - override def tail = new TreeMap(tree.delete(firstKey)) - override def init = new TreeMap(tree.delete(lastKey)) + override def tail = new TreeMap(RB.delete(tree, firstKey)) + override def init = new TreeMap(RB.delete(tree, lastKey)) override def drop(n: Int) = { if (n <= 0) this @@ -134,7 +134,7 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: * @param value the value to be associated with `key` * @return a new $coll with the updated binding */ - override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(tree.update(key, value)) + override def updated [B1 >: B](key: A, value: B1): TreeMap[A, B1] = new TreeMap(RB.update(tree, key, value)) /** Add a key/value pair to this map. * @tparam B1 type of the value of the new binding, a supertype of `B` @@ -175,13 +175,13 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: * @return a new $coll with the inserted binding, if it wasn't present in the map */ def insert [B1 >: B](key: A, value: B1): TreeMap[A, B1] = { - assert(tree.lookup(key).isEmpty) - new TreeMap(tree.update(key, value)) + assert(!RB.contains(tree, key)) + new TreeMap(RB.update(tree, key, value)) } def - (key:A): TreeMap[A, B] = - if (tree.lookup(key).isEmpty) this - else new TreeMap(tree.delete(key)) + if (!RB.contains(tree, key)) this + else new TreeMap(RB.delete(tree, key)) /** Check if this map maps `key` to a value and return the * value if it exists. @@ -189,21 +189,19 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: * @param key the key of the mapping of interest * @return the value of the mapping, if it exists */ - override def get(key: A): Option[B] = lookup(tree, key) match { - case n: NonEmpty[_, _] => Some(n.value) - case _ => None - } + override def get(key: A): Option[B] = RB.get(tree, key) /** Creates a new iterator over all elements contained in this * object. 
* * @return the new iterator */ - def iterator: Iterator[(A, B)] = tree.iterator + def iterator: Iterator[(A, B)] = RB.iterator(tree) - override def toStream: Stream[(A, B)] = tree.iterator.toStream + override def contains(key: A): Boolean = RB.contains(tree, key) + override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) - override def foreach[U](f : ((A,B)) => U) = tree foreach f + override def foreach[U](f : ((A,B)) => U) = RB.foreach(tree, f) } diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 74c63d0eb5..00ebeab868 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -50,19 +50,19 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: Ordering[A]) extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { - import RedBlack._ + import immutable.{RedBlack => RB} override def stringPrefix = "TreeSet" - override def size = tree.count + override def size = RB.count(tree) - override def head = tree.smallest.key - override def headOption = if (tree.isEmpty) None else Some(head) - override def last = tree.greatest.key - override def lastOption = if (tree.isEmpty) None else Some(last) + override def head = RB.smallest(tree).key + override def headOption = if (RB.isEmpty(tree)) None else Some(head) + override def last = RB.greatest(tree).key + override def lastOption = if (RB.isEmpty(tree)) None else Some(last) - override def tail = new TreeSet(tree.delete(firstKey)) - override def init = new TreeSet(tree.delete(lastKey)) + override def tail = new TreeSet(RB.delete(tree, firstKey)) + override def init = new TreeSet(RB.delete(tree, lastKey)) override def drop(n: Int) = { if (n <= 0) this @@ -102,7 +102,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O def isSmaller(x: A, y: A) = compare(x,y) < 0 - def this()(implicit ordering: Ordering[A]) = this(RedBlack.Empty.empty)(ordering) + def this()(implicit ordering: Ordering[A]) = this(null)(ordering) private def newSet(t: RedBlack.Tree[A, Unit]) = new TreeSet[A](t) @@ -115,7 +115,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * @param elem a new element to add. * @return a new $coll containing `elem` and all the elements of this $coll. */ - def + (elem: A): TreeSet[A] = newSet(tree.update(elem, ())) + def + (elem: A): TreeSet[A] = newSet(RB.update(tree, elem, ())) /** A new `TreeSet` with the entry added is returned, * assuming that elem is not in the TreeSet. @@ -124,8 +124,8 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * @return a new $coll containing `elem` and all the elements of this $coll. */ def insert(elem: A): TreeSet[A] = { - assert(tree.lookup(elem).isEmpty) - newSet(tree.update(elem, ())) + assert(!RB.contains(tree, elem)) + newSet(RB.update(tree, elem, ())) } /** Creates a new `TreeSet` with the entry removed. @@ -134,31 +134,29 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * @return a new $coll containing all the elements of this $coll except `elem`. */ def - (elem:A): TreeSet[A] = - if (tree.lookup(elem).isEmpty) this - else newSet(tree delete elem) + if (!RB.contains(tree, elem)) this + else newSet(RB.delete(tree, elem)) /** Checks if this set contains element `elem`. * * @param elem the element to check for membership. 
* @return true, iff `elem` is contained in this set. */ - def contains(elem: A): Boolean = !lookup(tree, elem).isEmpty + def contains(elem: A): Boolean = RB.contains(tree, elem) /** Creates a new iterator over all elements contained in this * object. * * @return the new iterator */ - def iterator: Iterator[A] = tree.keyIterator + def iterator: Iterator[A] = RB.keyIterator(tree) - override def toStream: Stream[A] = tree.keyIterator.toStream - - override def foreach[U](f: A => U) = tree foreachKey f + override def foreach[U](f: A => U) = RB.foreachKey(tree, f) override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = { - val tree = this.tree.range(from, until) - newSet(tree) + val ntree = RB.range(tree, from, until) + newSet(ntree) } - override def firstKey = tree.first - override def lastKey = tree.last + override def firstKey = head + override def lastKey = last } diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala index 78fb645ce8..5c52a27e38 100644 --- a/test/files/scalacheck/redblack.scala +++ b/test/files/scalacheck/redblack.scala @@ -8,7 +8,7 @@ Properties of a Red & Black Tree: A node is either red or black. The root is black. (This rule is used in some definitions and not others. Since the -root can always be changed from red to black but not necessarily vice-versa this +root can always be changed from red to black but not necessarily vice-versa this rule has little effect on analysis.) All leaves are black. Both children of every red node are black. @@ -21,17 +21,17 @@ abstract class RedBlackTest extends Properties("RedBlack") { def maximumSize = 5 import RedBlack._ - - def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) - Some(tree.iterator.drop(n).next) + + def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) + Some(iterator(tree).drop(n).next) else None - - def treeContains[A](tree: Tree[String, A], key: String) = tree.iterator.map(_._1) contains key - - def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = + + def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key + + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = if (level == 0) { - value(Empty.empty) + value(null) } else { for { oddOrEven <- choose(0, 2) @@ -41,7 +41,7 @@ abstract class RedBlackTest extends Properties("RedBlack") { left <- mkTree(nextLevel, !isRed, label + "L") right <- mkTree(nextLevel, !isRed, label + "R") } yield { - if (isRed) + if (isRed) RedTree(label + "N", 0, left, right) else BlackTree(label + "N", 0, left, right) @@ -52,11 +52,11 @@ abstract class RedBlackTest extends Properties("RedBlack") { depth <- choose(minimumSize, maximumSize + 1) tree <- mkTree(depth) } yield tree - + type ModifyParm def genParm(tree: Tree[String, Int]): Gen[ModifyParm] def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] - + def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for { tree <- genTree parm <- genParm(tree) @@ -65,41 +65,41 @@ abstract class RedBlackTest extends Properties("RedBlack") { trait RedBlackInvariants { self: RedBlackTest => - + import RedBlack._ - - def rootIsBlack[A](t: Tree[String, A]) = t.isBlack - + + def rootIsBlack[A](t: Tree[String, A]) = isBlack(t) + def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match { - case Empty.Instance => t.isBlack - case 
ne: NonEmpty[_, _] => List(ne.left, ne.right) forall areAllLeavesBlack + case null => isBlack(t) + case ne => List(ne.left, ne.right) forall areAllLeavesBlack } - + def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match { - case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t)) + case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t)) case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack - case Empty.Instance => true + case null => true } - + def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match { - case Empty.Instance => List(1) + case null => List(1) case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves } - + def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match { - case Empty.Instance => true - case ne: NonEmpty[_, _] => + case null => true + case ne => ( - blackNodesToLeaves(ne).distinct.size == 1 - && areBlackNodesToLeavesEqual(ne.left) + blackNodesToLeaves(ne).distinct.size == 1 + && areBlackNodesToLeavesEqual(ne.left) && areBlackNodesToLeavesEqual(ne.right) ) } - - def orderIsPreserved[A](t: Tree[String, A]): Boolean = - t.iterator zip t.iterator.drop(1) forall { case (x, y) => x._1 < y._1 } - + + def orderIsPreserved[A](t: Tree[String, A]): Boolean = + iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 } + def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => invariant(newTree) } @@ -113,10 +113,10 @@ trait RedBlackInvariants { object TestInsert extends RedBlackTest with RedBlackInvariants { import RedBlack._ - + override type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = tree update (generateKey(tree, parm), 0) + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0) def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { case Some((key, _)) => key.init.mkString + "MN" @@ -133,18 +133,18 @@ object TestInsert extends RedBlackTest with RedBlackInvariants { object TestModify extends RedBlackTest { import RedBlack._ - + def newValue = 1 override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { - case (key, _) => tree update (key, newValue) + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + case (key, _) => update(tree, key, newValue) } getOrElse tree property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) => nodeAt(tree,parm) forall { case (key, _) => - newTree.iterator contains (key, newValue) + iterator(newTree) contains (key, newValue) } } } @@ -154,11 +154,11 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { override def minimumSize = 1 override 
type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { - case (key, _) => tree delete key + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + case (key, _) => delete(tree, key) } getOrElse tree - + property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) => nodeAt(tree, parm) forall { case (key, _) => !treeContains(newTree, key) @@ -168,37 +168,37 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { object TestRange extends RedBlackTest with RedBlackInvariants { import RedBlack._ - + override type ModifyParm = (Option[Int], Option[Int]) override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { - from <- choose(0, tree.iterator.size) - to <- choose(0, tree.iterator.size) suchThat (from <=) + from <- choose(0, iterator(tree).size) + to <- choose(0, iterator(tree).size) suchThat (from <=) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug } yield (optionalFrom, optionalTo) - + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - tree range (from, to) + range(tree, from, to) } - + property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) && - ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >)))) + ("lower boundary" |: (from forall ( key => iterator(newTree).map(_._1) forall (key <=)))) && + ("upper boundary" |: (to forall ( key => iterator(newTree).map(_._1) forall (key >)))) } - + property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - val filteredTree = (tree.iterator - .map(_._1) + val filteredTree = (iterator(tree) + .map(_._1) .filter(key => from forall (key >=)) .filter(key => to forall (key <)) .toList) - filteredTree == newTree.iterator.map(_._1).toList + filteredTree == iterator(newTree).map(_._1).toList } } } -- cgit v1.2.3 From 72ec0ac869a29fca9ea0d45a3f70f1e9e1babaaf Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Wed, 4 Jan 2012 17:10:20 +0100 Subject: Optimize foreach and iterators. 
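The commit below swaps the iterator's mutable.ArrayStack for a pre-allocated array whose length comes from the red-black height bound 2*log2(n + 2) - 2 quoted in the diff, minus one because the deepest node is never pushed. As a quick worked check of that sizing expression (PathBufferSizing and the sample sizes are illustrative; the formula itself is the one used in the patch):

    object PathBufferSizing {
      // ceil(log2(x)) == 32 - Integer.numberOfLeadingZeros(x - 1), as noted in the patch
      def maximumHeight(count: Int): Int =
        2 * (32 - Integer.numberOfLeadingZeros(count + 2 - 1)) - 2 - 1

      def main(args: Array[String]) {
        for (n <- Seq(1, 6, 14, 1 << 20))
          println("tree of " + n + " entries -> path array of " + maximumHeight(n) + " slots")
      }
    }

Even for a tree with a million entries the expression yields only a few dozen slots, so allocating the array eagerly per iterator is cheap.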
--- .../scala/collection/immutable/RedBlack.scala | 108 +++++++++++++-------- .../scala/collection/immutable/TreeMap.scala | 5 +- .../scala/collection/immutable/TreeSet.scala | 2 +- test/files/scalacheck/treemap.scala | 16 +++ test/files/scalacheck/treeset.scala | 16 +++ 5 files changed, 103 insertions(+), 44 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 2537d043fd..6af6b6ef03 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -11,6 +11,7 @@ package scala.collection package immutable +import annotation.tailrec import annotation.meta.getter /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. @@ -37,7 +38,7 @@ object RedBlack { case tree => Some(tree.value) } - @annotation.tailrec + @tailrec def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp < 0) lookup(tree.left, x) @@ -64,18 +65,19 @@ object RedBlack { } def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { - foreach(tree.left, f) + if (tree.left ne null) foreach(tree.left, f) f((tree.key, tree.value)) - foreach(tree.right, f) + if (tree.right ne null) foreach(tree.right, f) } def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) { - foreachKey(tree.left, f) + if (tree.left ne null) foreachKey(tree.left, f) f(tree.key) - foreachKey(tree.right, f) + if (tree.right ne null) foreachKey(tree.right, f) } - def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = if (tree eq null) Iterator.empty else new TreeIterator(tree) - def keyIterator[A, _](tree: Tree[A, _]): Iterator[A] = if (tree eq null) Iterator.empty else new TreeKeyIterator(tree) + def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) + def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) + def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { if (isRedTree(l) && isRedTree(l.left)) @@ -283,7 +285,7 @@ object RedBlack { @(inline @getter) final val left: Tree[A, B], @(inline @getter) final val right: Tree[A, B]) extends Serializable { - @(inline @getter) final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) + final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) def isBlack: Boolean def nth(n: Int): Tree[A, B] = { val count = RedBlack.count(left) @@ -322,53 +324,75 @@ object RedBlack { def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } - private[this] class TreeIterator[A, B](tree: Tree[A, B]) extends Iterator[(A, B)] { + private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + override def hasNext: Boolean = next ne null - override def next: (A, B) = next match { + override def next: R = next match { case null => throw new NoSuchElementException("next on empty iterator") case tree => - addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) null else path.pop() - (tree.key, tree.value) + next = findNext(tree.right) + nextResult(tree) } - @annotation.tailrec - private[this] def addLeftMostBranchToPath(tree: Tree[A, B]) { - if (tree ne null) 
{ - path.push(tree) - addLeftMostBranchToPath(tree.left) + @tailrec + private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) popPath() + else if (tree.left eq null) tree + else { + pushPath(tree) + findNext(tree.left) } } - private[this] val path = mutable.ArrayStack.empty[Tree[A, B]] - addLeftMostBranchToPath(tree) - private[this] var next: Tree[A, B] = path.pop() - } - - private[this] class TreeKeyIterator[A](tree: Tree[A, _]) extends Iterator[A] { - override def hasNext: Boolean = next ne null - - override def next: A = next match { - case null => - throw new NoSuchElementException("next on empty iterator") - case tree => - addLeftMostBranchToPath(tree.right) - next = if (path.isEmpty) null else path.pop() - tree.key + private[this] def pushPath(tree: Tree[A, B]) { + try { + path(index) = tree + index += 1 + } catch { + case _: ArrayIndexOutOfBoundsException => + // Either the tree became unbalanced or we calculated the maximum height incorrectly. + // To avoid crashing the iterator we expand the path array. Obviously this should never + // happen... + // + // An exception handler is used instead of an if-condition to optimize the normal path. + assert(index >= path.length) + path :+= null + pushPath(tree) + } + } + private[this] def popPath(): Tree[A, B] = if (index == 0) null else { + index -= 1 + path(index) } - @annotation.tailrec - private[this] def addLeftMostBranchToPath(tree: Tree[A, _]) { - if (tree ne null) { - path.push(tree) - addLeftMostBranchToPath(tree.left) - } + private[this] var path = if (tree eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one. 
+ */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1 + new Array[Tree[A, B]](maximumHeight) } + private[this] var index = 0 + private[this] var next: Tree[A, B] = findNext(tree) + } + + private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) { + override def nextResult(tree: Tree[A, B]) = tree.key + } - private[this] val path = mutable.ArrayStack.empty[Tree[A, _]] - addLeftMostBranchToPath(tree) - private[this] var next: Tree[A, _] = path.pop() + private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) { + override def nextResult(tree: Tree[A, B]) = tree.value } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 45e936444f..6e8cf625f4 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -196,7 +196,10 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: * * @return the new iterator */ - def iterator: Iterator[(A, B)] = RB.iterator(tree) + override def iterator: Iterator[(A, B)] = RB.iterator(tree) + + override def keysIterator: Iterator[A] = RB.keysIterator(tree) + override def valuesIterator: Iterator[B] = RB.valuesIterator(tree) override def contains(key: A): Boolean = RB.contains(tree, key) override def isDefinedAt(key: A): Boolean = RB.contains(tree, key) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 00ebeab868..7c27e9f5b0 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -149,7 +149,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O * * @return the new iterator */ - def iterator: Iterator[A] = RB.keyIterator(tree) + def iterator: Iterator[A] = RB.keysIterator(tree) override def foreach[U](f: A => U) = RB.foreachKey(tree, f) diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala index 43d307600d..9970bb01aa 100644 --- a/test/files/scalacheck/treemap.scala +++ b/test/files/scalacheck/treemap.scala @@ -22,6 +22,22 @@ object Test extends Properties("TreeMap") { consistent } + property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) => + /* + * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height + * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order. + * + * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure + * it is big enough for these worst-case trees. 
+ */ + val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2 + val values = (1 to highest).reverse + val subject = TreeMap(values zip values: _*) + val it = subject.iterator + try { while (it.hasNext) it.next; true } catch { case _ => false } + } + property("sorted") = forAll { (subject: TreeMap[Int, String]) => (subject.size >= 3) ==> { subject.zip(subject.tail).forall { case (x, y) => x._1 < y._1 } }} diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala index 3cefef7040..87c3eb7108 100644 --- a/test/files/scalacheck/treeset.scala +++ b/test/files/scalacheck/treeset.scala @@ -18,6 +18,22 @@ object Test extends Properties("TreeSet") { consistent } + property("worst-case tree height is iterable") = forAll(choose(0, 10), arbitrary[Boolean]) { (n: Int, even: Boolean) => + /* + * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * you can construct a skinny tree of height 2n by inserting the elements [1 .. 2^(n+1) - 2] and a tree of height + * 2n+1 by inserting the elements [1 .. 3 * 2^n - 2], both in reverse order. + * + * Since we allocate a fixed size buffer in the iterator (based on the tree size) we need to ensure + * it is big enough for these worst-case trees. + */ + val highest = if (even) (1 << (n+1)) - 2 else 3*(1 << n) - 2 + val values = (1 to highest).reverse + val subject = TreeSet(values: _*) + val it = subject.iterator + try { while (it.hasNext) it.next; true } catch { case _ => false } + } + property("sorted") = forAll { (subject: TreeSet[Int]) => (subject.size >= 3) ==> { subject.zip(subject.tail).forall { case (x, y) => x < y } }} -- cgit v1.2.3 From d735d0f1d631942931765c793b983511359961e1 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Thu, 5 Jan 2012 20:05:25 +0100 Subject: Move nth method to RedBlack. Inline factories for tree nodes. 
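The nth helper hoisted onto the RedBlack object below is an order-statistic lookup: because every node caches its subtree count, the n-th smallest entry is found in O(log n) steps, which is what TreeMap/TreeSet rely on for drop, take and slice. A self-contained toy version of the same recursion (NthSketch and Node are made-up; Option stands in for the null children of the real trees):

    object NthSketch {
      final case class Node(key: Int, left: Option[Node], right: Option[Node]) {
        val count: Int = 1 + left.map(_.count).getOrElse(0) + right.map(_.count).getOrElse(0)
      }

      @annotation.tailrec
      def nth(t: Node, n: Int): Node = {
        val leftCount = t.left.map(_.count).getOrElse(0)
        if (n < leftCount) nth(t.left.get, n)                        // target is in the left subtree
        else if (n > leftCount) nth(t.right.get, n - leftCount - 1)  // skip left subtree and this node
        else t                                                       // exactly leftCount smaller keys
      }

      def main(args: Array[String]) {
        def leaf(k: Int) = Node(k, None, None)
        val tree = Node(4, Some(Node(2, Some(leaf(1)), Some(leaf(3)))), Some(leaf(5)))
        println((0 until tree.count).map(i => nth(tree, i).key))     // Vector(1, 2, 3, 4, 5)
      }
    }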
--- .../scala/collection/immutable/RedBlack.scala | 26 ++++++++++++---------- .../scala/collection/immutable/TreeMap.scala | 6 ++--- .../scala/collection/immutable/TreeSet.scala | 6 ++--- 3 files changed, 20 insertions(+), 18 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 6af6b6ef03..5729260cb2 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -79,6 +79,14 @@ object RedBlack { def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = RedBlack.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { if (isRedTree(l) && isRedTree(l.left)) RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) @@ -287,16 +295,9 @@ object RedBlack { extends Serializable { final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) def isBlack: Boolean - def nth(n: Int): Tree[A, B] = { - val count = RedBlack.count(left) - if (n < count) left.nth(n) - else if (n > count) right.nth(n - count - 1) - else this - } def black: Tree[A, B] def red: Tree[A, B] } - final class RedTree[A, +B](key: A, value: B, left: Tree[A, B], @@ -306,10 +307,6 @@ object RedBlack { override def red = this override def toString = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" } - object RedTree { - def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) - def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) - } final class BlackTree[A, +B](key: A, value: B, left: Tree[A, B], @@ -319,8 +316,13 @@ object RedBlack { override def red = RedTree(key, value, left, right) override def toString = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" } + + object RedTree { + @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) + def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) + } object BlackTree { - def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) + @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 6e8cf625f4..7e22c19e11 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -88,20 +88,20 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: override def drop(n: Int) = { if (n <= 0) this else if (n >= size) empty - else from(tree.nth(n).key) + else from(RB.nth(tree, n).key) } override def take(n: Int) = { if (n <= 0) empty else if (n >= size) this - else until(tree.nth(n).key) + else until(RB.nth(tree, n).key) } override def slice(from: Int, until: Int) = { if (until <= from) empty else if 
(from <= 0) take(until) else if (until >= size) drop(from) - else range(tree.nth(from).key, tree.nth(until).key) + else range(RB.nth(tree, from).key, RB.nth(tree, until).key) } override def dropRight(n: Int) = take(size - n) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 7c27e9f5b0..d36bc374c2 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -67,20 +67,20 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O override def drop(n: Int) = { if (n <= 0) this else if (n >= size) empty - else from(tree.nth(n).key) + else from(RB.nth(tree, n).key) } override def take(n: Int) = { if (n <= 0) empty else if (n >= size) this - else until(tree.nth(n).key) + else until(RB.nth(tree, n).key) } override def slice(from: Int, until: Int) = { if (until <= from) empty else if (from <= 0) take(until) else if (until >= size) drop(from) - else range(tree.nth(from).key, tree.nth(until).key) + else range(RB.nth(tree, from).key, RB.nth(tree, until).key) } override def dropRight(n: Int) = take(size - n) -- cgit v1.2.3 From 388ff4716f9f4162165221c42fb2f2aa83e1063c Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Thu, 5 Jan 2012 21:04:49 +0100 Subject: Add implementation notes. Consistently use eq/ne to compare with null. --- .../scala/collection/immutable/RedBlack.scala | 31 +++++++++++++++++----- 1 file changed, 24 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 5729260cb2..30d3ff37a3 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -15,6 +15,11 @@ import annotation.tailrec import annotation.meta.getter /** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. + * + * Implementation note: since efficiency is important for data structures this implementation + * uses null to represent empty trees. This also means pattern matching cannot + * easily be used. The API represented by the RedBlack object tries to hide these optimizations + * behind a reasonably clean API. * * @since 2.3 */ @@ -103,7 +108,7 @@ object RedBlack { else mkTree(isBlack, x, xv, a, r) } - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree == null) { + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { RedTree(k, v, null, null) } else { val cmp = ordering.compare(k, tree.key) @@ -114,7 +119,7 @@ object RedBlack { // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree == null) null else { + private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { if (isRedTree(tr)) { RedTree(x, xv, tl.black, tr.black) @@ -287,6 +292,15 @@ object RedBlack { } } + /* + * Forcing direct fields access using the @inline annotation helps speed up + * various operations (especially smallest/greatest and update/delete). 
+ * + * Unfortunately the direct field access is not guaranteed to work (but + * works on the current implementation of the Scala compiler). + * + * An alternative is to implement the these classes using plain old Java code... + */ sealed abstract class Tree[A, +B]( @(inline @getter) final val key: A, @(inline @getter) final val value: B, @@ -355,11 +369,14 @@ object RedBlack { index += 1 } catch { case _: ArrayIndexOutOfBoundsException => - // Either the tree became unbalanced or we calculated the maximum height incorrectly. - // To avoid crashing the iterator we expand the path array. Obviously this should never - // happen... - // - // An exception handler is used instead of an if-condition to optimize the normal path. + /* + * Either the tree became unbalanced or we calculated the maximum height incorrectly. + * To avoid crashing the iterator we expand the path array. Obviously this should never + * happen... + * + * An exception handler is used instead of an if-condition to optimize the normal path. + * This makes a large difference in iteration speed! + */ assert(index >= path.length) path :+= null pushPath(tree) -- cgit v1.2.3 From 7e92b3c60574d7fc0a0e83de738b835f4f98a685 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Fri, 6 Jan 2012 23:04:39 +0100 Subject: Deprecate TreeMap.isSmaller and TreeSet.isSmaller. These methods were used by the old RedBlack tree implementation, but are no longer required and were not defined in any interface. Use ordering or compare instead. --- src/library/scala/collection/immutable/TreeMap.scala | 1 + src/library/scala/collection/immutable/TreeSet.scala | 1 + 2 files changed, 2 insertions(+) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 7e22c19e11..65e42ad061 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -53,6 +53,7 @@ class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: import immutable.{RedBlack => RB} + @deprecated("use `ordering.lt` instead", "2.10") def isSmaller(x: A, y: A) = ordering.lt(x, y) override protected[this] def newBuilder : Builder[(A, B), TreeMap[A, B]] = diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index d36bc374c2..f7ceafdf8f 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -100,6 +100,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O (take(n), drop(n)) } + @deprecated("use `ordering.lt` instead", "2.10") def isSmaller(x: A, y: A) = compare(x,y) < 0 def this()(implicit ordering: Ordering[A]) = this(null)(ordering) -- cgit v1.2.3 From f656142ddbcecfd3f8482e2b55067de3d0ebd3ce Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Fri, 6 Jan 2012 23:19:39 +0100 Subject: Restore old RedBlack class to maintain backwards compatibility. The class is marked as deprecated and no longer used by the TreeMap/TreeSet implementation but is restored in case it was used by anyone else (since it was not marked as private to the Scala collection library). Renamed RedBlack.{Tree,RedTree,BlackTree} to Node, RedNode, and BlackNode to work around name clash with RedBlack class. 
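
To make the compatibility story concrete: third-party code that extended the
previously public abstract class keeps compiling against this change (with a
deprecation warning), it just no longer shares any code with the TreeMap/TreeSet
implementation. A hypothetical example of such user code, not taken from any
real project:

  // Orders keys by string length via the abstract isSmaller method.
  // Extending RedBlack now triggers the
  // "use `TreeMap` or `TreeSet` instead" deprecation warning.
  object ByLength extends scala.collection.immutable.RedBlack[String] {
    def isSmaller(x: String, y: String): Boolean = x.length < y.length
  }

  object CompatCheck {
    def main(args: Array[String]): Unit = {
      val t = ByLength.Empty.update("a", 1).update("ccc", 3).update("bb", 2)
      // In-order traversal follows isSmaller, i.e. string length here.
      println(t.toStream.toList) // List((a,1), (bb,2), (ccc,3))
    }
  }

New code should use TreeMap/TreeSet (and an Ordering) directly; the restored
class exists only so that previously compiling sources do not break.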
--- .../scala/collection/immutable/RedBlack.scala | 561 ++++++++++++++++----- .../scala/collection/immutable/TreeMap.scala | 2 +- .../scala/collection/immutable/TreeSet.scala | 4 +- test/files/scalacheck/redblack.scala | 56 +- 4 files changed, 452 insertions(+), 171 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 30d3ff37a3..37ff7a7f54 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -26,167 +26,167 @@ import annotation.meta.getter private[immutable] object RedBlack { - private def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + def isBlack(tree: Node[_, _]) = (tree eq null) || isBlackNode(tree) + def isRedNode(tree: Node[_, _]) = tree.isInstanceOf[RedNode[_, _]] + def isBlackNode(tree: Node[_, _]) = tree.isInstanceOf[BlackNode[_, _]] - private def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = - if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) - - def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree) - def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]] - def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]] + def isEmpty(tree: Node[_, _]): Boolean = tree eq null - def isEmpty(tree: Tree[_, _]): Boolean = tree eq null - - def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null - def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { + def contains[A](tree: Node[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null + def get[A, B](tree: Node[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { case null => None case tree => Some(tree.value) } @tailrec - def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + def lookup[A, B](tree: Node[A, B], x: A)(implicit ordering: Ordering[A]): Node[A, B] = if (tree eq null) null else { val cmp = ordering.compare(x, tree.key) if (cmp < 0) lookup(tree.left, x) else if (cmp > 0) lookup(tree.right, x) else tree } - def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count - def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) - def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) - def range[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(tree, from, until)) + def count(tree: Node[_, _]) = if (tree eq null) 0 else tree.count + def update[A, B, B1 >: B](tree: Node[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Node[A, B1] = blacken(upd(tree, k, v)) + def delete[A, B](tree: Node[A, B], k: A)(implicit ordering: Ordering[A]): Node[A, B] = blacken(del(tree, k)) + def range[A, B](tree: Node[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Node[A, B] = blacken(rng(tree, from, until)) - def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + def smallest[A, B](tree: Node[A, B]): Node[A, B] = { if (tree eq null) throw new NoSuchElementException("empty map") var result = tree while (result.left ne null) result = result.left result } - def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + def greatest[A, B](tree: Node[A, B]): 
Node[A, B] = { if (tree eq null) throw new NoSuchElementException("empty map") var result = tree while (result.right ne null) result = result.right result } - def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { + def foreach[A, B, U](tree: Node[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { if (tree.left ne null) foreach(tree.left, f) f((tree.key, tree.value)) if (tree.right ne null) foreach(tree.right, f) } - def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) { + def foreachKey[A, U](tree: Node[A, _], f: A => U): Unit = if (tree ne null) { if (tree.left ne null) foreachKey(tree.left, f) f(tree.key) if (tree.right ne null) foreachKey(tree.right, f) } - def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) - def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) - def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) + def iterator[A, B](tree: Node[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) + def keysIterator[A, _](tree: Node[A, _]): Iterator[A] = new KeysIterator(tree) + def valuesIterator[_, B](tree: Node[_, B]): Iterator[B] = new ValuesIterator(tree) @tailrec - def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + def nth[A, B](tree: Node[A, B], n: Int): Node[A, B] = { val count = RedBlack.count(tree.left) if (n < count) nth(tree.left, n) else if (n > count) nth(tree.right, n - count - 1) else tree } - private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { - if (isRedTree(l) && isRedTree(l.left)) - RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) - else if (isRedTree(l) && isRedTree(l.right)) - RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) + private def blacken[A, B](t: Node[A, B]): Node[A, B] = if (t eq null) null else t.black + + private def mkNode[A, B](isBlack: Boolean, k: A, v: B, l: Node[A, B], r: Node[A, B]) = + if (isBlack) BlackNode(k, v, l, r) else RedNode(k, v, l, r) + + private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Node[A, B1], d: Node[A, B1]): Node[A, B1] = { + if (isRedNode(l) && isRedNode(l.left)) + RedNode(l.key, l.value, BlackNode(l.left.key, l.left.value, l.left.left, l.left.right), BlackNode(z, zv, l.right, d)) + else if (isRedNode(l) && isRedNode(l.right)) + RedNode(l.right.key, l.right.value, BlackNode(l.key, l.value, l.left, l.right.left), BlackNode(z, zv, l.right.right, d)) else - mkTree(isBlack, z, zv, l, d) + mkNode(isBlack, z, zv, l, d) } - private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): Tree[A, B1] = { - if (isRedTree(r) && isRedTree(r.left)) - RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) - else if (isRedTree(r) && isRedTree(r.right)) - RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) + private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Node[A, B1], r: Node[A, B1]): Node[A, B1] = { + if (isRedNode(r) && isRedNode(r.left)) + RedNode(r.left.key, r.left.value, BlackNode(x, xv, a, r.left.left), BlackNode(r.key, r.value, r.left.right, r.right)) + else if (isRedNode(r) && isRedNode(r.right)) + RedNode(r.key, r.value, BlackNode(x, xv, a, 
r.left), BlackNode(r.right.key, r.right.value, r.right.left, r.right.right)) else - mkTree(isBlack, x, xv, a, r) + mkNode(isBlack, x, xv, a, r) } - private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { - RedTree(k, v, null, null) + private[this] def upd[A, B, B1 >: B](tree: Node[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Node[A, B1] = if (tree eq null) { + RedNode(k, v, null, null) } else { val cmp = ordering.compare(k, tree.key) if (cmp < 0) balanceLeft(tree.isBlack, tree.key, tree.value, upd(tree.left, k, v), tree.right) else if (cmp > 0) balanceRight(tree.isBlack, tree.key, tree.value, tree.left, upd(tree.right, k, v)) - else mkTree(tree.isBlack, k, v, tree.left, tree.right) + else mkNode(tree.isBlack, k, v, tree.left, tree.right) } // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html - private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { - def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - if (isRedTree(tr)) { - RedTree(x, xv, tl.black, tr.black) - } else if (isRedTree(tl.left)) { - RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) - } else if (isRedTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + // http://www.cse.unsw.edu.au/~dons/data/RedBlackNode.html + private[this] def del[A, B](tree: Node[A, B], k: A)(implicit ordering: Ordering[A]): Node[A, B] = if (tree eq null) null else { + def balance(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tl)) { + if (isRedNode(tr)) { + RedNode(x, xv, tl.black, tr.black) + } else if (isRedNode(tl.left)) { + RedNode(tl.key, tl.value, tl.left.black, BlackNode(x, xv, tl.right, tr)) + } else if (isRedNode(tl.right)) { + RedNode(tl.right.key, tl.right.value, BlackNode(tl.key, tl.value, tl.left, tl.right.left), BlackNode(x, xv, tl.right.right, tr)) } else { - BlackTree(x, xv, tl, tr) + BlackNode(x, xv, tl, tr) } - } else if (isRedTree(tr)) { - if (isRedTree(tr.right)) { - RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) - } else if (isRedTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) + } else if (isRedNode(tr)) { + if (isRedNode(tr.right)) { + RedNode(tr.key, tr.value, BlackNode(x, xv, tl, tr.left), tr.right.black) + } else if (isRedNode(tr.left)) { + RedNode(tr.left.key, tr.left.value, BlackNode(x, xv, tl, tr.left.left), BlackNode(tr.key, tr.value, tr.left.right, tr.right)) } else { - BlackTree(x, xv, tl, tr) + BlackNode(x, xv, tl, tr) } } else { - BlackTree(x, xv, tl, tr) + BlackNode(x, xv, tl, tr) } - def subl(t: Tree[A, B]) = - if (t.isInstanceOf[BlackTree[_, _]]) t.red + def subl(t: Node[A, B]) = + if (t.isInstanceOf[BlackNode[_, _]]) t.red else sys.error("Defect: invariance violation; expected black, got "+t) - def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { - RedTree(x, xv, tl.black, tr) - } else if (isBlackTree(tr)) { + def balLeft(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tl)) { + RedNode(x, xv, tl.black, tr) + } else if (isBlackNode(tr)) { balance(x, xv, tl, tr.red) - } else if (isRedTree(tr) && isBlackTree(tr.left)) { - RedTree(tr.left.key, tr.left.value, 
BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) + } else if (isRedNode(tr) && isBlackNode(tr.left)) { + RedNode(tr.left.key, tr.left.value, BlackNode(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) } else { - sys.error("Defect: invariance violation at ") // TODO + sys.error("Defect: invariance violation") } - def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { - RedTree(x, xv, tl, tr.black) - } else if (isBlackTree(tl)) { + def balRight(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tr)) { + RedNode(x, xv, tl, tr.black) + } else if (isBlackNode(tl)) { balance(x, xv, tl.red, tr) - } else if (isRedTree(tl) && isBlackTree(tl.right)) { - RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else if (isRedNode(tl) && isBlackNode(tl.right)) { + RedNode(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackNode(x, xv, tl.right.right, tr)) } else { - sys.error("Defect: invariance violation at ") // TODO + sys.error("Defect: invariance violation") } - def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right) - def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k)) - def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) { + def delLeft = if (isBlackNode(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedNode(tree.key, tree.value, del(tree.left, k), tree.right) + def delRight = if (isBlackNode(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedNode(tree.key, tree.value, tree.left, del(tree.right, k)) + def append(tl: Node[A, B], tr: Node[A, B]): Node[A, B] = if (tl eq null) { tr } else if (tr eq null) { tl - } else if (isRedTree(tl) && isRedTree(tr)) { + } else if (isRedNode(tl) && isRedNode(tr)) { val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) + if (isRedNode(bc)) { + RedNode(bc.key, bc.value, RedNode(tl.key, tl.value, tl.left, bc.left), RedNode(tr.key, tr.value, bc.right, tr.right)) } else { - RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) + RedNode(tl.key, tl.value, tl.left, RedNode(tr.key, tr.value, bc, tr.right)) } - } else if (isBlackTree(tl) && isBlackTree(tr)) { + } else if (isBlackNode(tl) && isBlackNode(tr)) { val bc = append(tl.right, tr.left) - if (isRedTree(bc)) { - RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right)) + if (isRedNode(bc)) { + RedNode(bc.key, bc.value, BlackNode(tl.key, tl.value, tl.left, bc.left), BlackNode(tr.key, tr.value, bc.right, tr.right)) } else { - balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) + balLeft(tl.key, tl.value, tl.left, BlackNode(tr.key, tr.value, bc, tr.right)) } - } else if (isRedTree(tr)) { - RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) - } else if (isRedTree(tl)) { - RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) + } else if (isRedNode(tr)) { + RedNode(tr.key, tr.value, append(tl, tr.left), tr.right) + } else if (isRedNode(tl)) 
{ + RedNode(tl.key, tl.value, tl.left, append(tl.right, tr)) } else { sys.error("unmatched tree on append: " + tl + ", " + tr) } @@ -197,7 +197,7 @@ object RedBlack { else append(tree.left, tree.right) } - private[this] def rng[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { + private[this] def rng[A, B](tree: Node[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Node[A, B] = { if (tree eq null) return null if (from == None && until == None) return tree if (from != None && ordering.lt(tree.key, from.get)) return rng(tree.right, from, until); @@ -219,9 +219,9 @@ object RedBlack { // whether the zipper was traversed left-most or right-most. // If the trees were balanced, returns an empty zipper - private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = { + private[this] def compareDepth[A, B](left: Node[A, B], right: Node[A, B]): (List[Node[A, B]], Boolean, Boolean, Int) = { // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = { + def unzip(zipper: List[Node[A, B]], leftMost: Boolean): List[Node[A, B]] = { val next = if (leftMost) zipper.head.left else zipper.head.right next match { case null => zipper @@ -231,25 +231,25 @@ object RedBlack { // Unzip left tree on the rightmost side and right tree on the leftmost side until one is // found to be deeper, or the bottom is reached - def unzipBoth(left: Tree[A, B], - right: Tree[A, B], - leftZipper: List[Tree[A, B]], - rightZipper: List[Tree[A, B]], - smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = { - if (isBlackTree(left) && isBlackTree(right)) { + def unzipBoth(left: Node[A, B], + right: Node[A, B], + leftZipper: List[Node[A, B]], + rightZipper: List[Node[A, B]], + smallerDepth: Int): (List[Node[A, B]], Boolean, Boolean, Int) = { + if (isBlackNode(left) && isBlackNode(right)) { unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1) - } else if (isRedTree(left) && isRedTree(right)) { + } else if (isRedNode(left) && isRedNode(right)) { unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth) - } else if (isRedTree(right)) { + } else if (isRedNode(right)) { unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth) - } else if (isRedTree(left)) { + } else if (isRedNode(left)) { unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth) } else if ((left eq null) && (right eq null)) { (Nil, true, false, smallerDepth) - } else if ((left eq null) && isBlackTree(right)) { + } else if ((left eq null) && isBlackNode(right)) { val leftMost = true (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth) - } else if (isBlackTree(left) && (right eq null)) { + } else if (isBlackNode(left) && (right eq null)) { val leftMost = false (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth) } else { @@ -258,10 +258,10 @@ object RedBlack { } unzipBoth(left, right, Nil, Nil, 0) } - private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = { + private[this] def rebalance[A, B](tree: Node[A, B], newLeft: Node[A, B], newRight: Node[A, B]) = { // This is like drop(n-1), but only counting black nodes - def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match { - case head :: tail if isBlackTree(head) => + def findDepth(zipper: 
List[Node[A, B]], depth: Int): List[Node[A, B]] = zipper match { + case head :: tail if isBlackNode(head) => if (depth == 1) zipper else findDepth(tail, depth - 1) case _ :: tail => findDepth(tail, depth) case Nil => sys.error("Defect: unexpected empty zipper while computing range") @@ -274,15 +274,15 @@ object RedBlack { val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) if (levelled) { - BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight) + BlackNode(tree.key, tree.value, blkNewLeft, blkNewRight) } else { val zipFrom = findDepth(zipper, smallerDepth) val union = if (leftMost) { - RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head) + RedNode(tree.key, tree.value, blkNewLeft, zipFrom.head) } else { - RedTree(tree.key, tree.value, zipFrom.head, blkNewRight) + RedNode(tree.key, tree.value, zipFrom.head, blkNewRight) } - val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) => + val zippedTree = zipFrom.tail.foldLeft(union: Node[A, B]) { (tree, node) => if (leftMost) balanceLeft(node.isBlack, node.key, node.value, tree, node.right) else @@ -301,47 +301,47 @@ object RedBlack { * * An alternative is to implement the these classes using plain old Java code... */ - sealed abstract class Tree[A, +B]( + sealed abstract class Node[A, +B]( @(inline @getter) final val key: A, @(inline @getter) final val value: B, - @(inline @getter) final val left: Tree[A, B], - @(inline @getter) final val right: Tree[A, B]) + @(inline @getter) final val left: Node[A, B], + @(inline @getter) final val right: Node[A, B]) extends Serializable { final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) def isBlack: Boolean - def black: Tree[A, B] - def red: Tree[A, B] + def black: Node[A, B] + def red: Node[A, B] } - final class RedTree[A, +B](key: A, + final class RedNode[A, +B](key: A, value: B, - left: Tree[A, B], - right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + left: Node[A, B], + right: Node[A, B]) extends Node[A, B](key, value, left, right) { override def isBlack = false - override def black = BlackTree(key, value, left, right) + override def black = BlackNode(key, value, left, right) override def red = this - override def toString = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" + override def toString = "RedNode(" + key + ", " + value + ", " + left + ", " + right + ")" } - final class BlackTree[A, +B](key: A, + final class BlackNode[A, +B](key: A, value: B, - left: Tree[A, B], - right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + left: Node[A, B], + right: Node[A, B]) extends Node[A, B](key, value, left, right) { override def isBlack = true override def black = this - override def red = RedTree(key, value, left, right) - override def toString = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" + override def red = RedNode(key, value, left, right) + override def toString = "BlackNode(" + key + ", " + value + ", " + left + ", " + right + ")" } - object RedTree { - @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) - def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) + object RedNode { + @inline def apply[A, B](key: A, value: B, left: Node[A, B], right: Node[A, B]) = new RedNode(key, value, left, right) + def unapply[A, B](t: RedNode[A, B]) = Some((t.key, t.value, t.left, t.right)) } - object BlackTree { - @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: 
Tree[A, B]) = new BlackTree(key, value, left, right) - def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) + object BlackNode { + @inline def apply[A, B](key: A, value: B, left: Node[A, B], right: Node[A, B]) = new BlackNode(key, value, left, right) + def unapply[A, B](t: BlackNode[A, B]) = Some((t.key, t.value, t.left, t.right)) } - private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] { - protected[this] def nextResult(tree: Tree[A, B]): R + private[this] abstract class TreeIterator[A, B, R](tree: Node[A, B]) extends Iterator[R] { + protected[this] def nextResult(tree: Node[A, B]): R override def hasNext: Boolean = next ne null @@ -354,7 +354,7 @@ object RedBlack { } @tailrec - private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = { + private[this] def findNext(tree: Node[A, B]): Node[A, B] = { if (tree eq null) popPath() else if (tree.left eq null) tree else { @@ -363,7 +363,7 @@ object RedBlack { } } - private[this] def pushPath(tree: Tree[A, B]) { + private[this] def pushPath(tree: Node[A, B]) { try { path(index) = tree index += 1 @@ -382,7 +382,7 @@ object RedBlack { pushPath(tree) } } - private[this] def popPath(): Tree[A, B] = if (index == 0) null else { + private[this] def popPath(): Node[A, B] = if (index == 0) null else { index -= 1 path(index) } @@ -397,21 +397,302 @@ object RedBlack { * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one. */ val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1 - new Array[Tree[A, B]](maximumHeight) + new Array[Node[A, B]](maximumHeight) } private[this] var index = 0 - private[this] var next: Tree[A, B] = findNext(tree) + private[this] var next: Node[A, B] = findNext(tree) + } + + private[this] class EntriesIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, (A, B)](tree) { + override def nextResult(tree: Node[A, B]) = (tree.key, tree.value) } - private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) { - override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + private[this] class KeysIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, A](tree) { + override def nextResult(tree: Node[A, B]) = tree.key } - private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) { - override def nextResult(tree: Tree[A, B]) = tree.key + private[this] class ValuesIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, B](tree) { + override def nextResult(tree: Node[A, B]) = tree.value } +} + + +/** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`. + * + * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information). 
+ * + * @since 2.3 + */ +@deprecated("use `TreeMap` or `TreeSet` instead", "2.10") +@SerialVersionUID(8691885935445612921L) +abstract class RedBlack[A] extends Serializable { - private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) { - override def nextResult(tree: Tree[A, B]) = tree.value + def isSmaller(x: A, y: A): Boolean + + private def blacken[B](t: Tree[B]): Tree[B] = t match { + case RedTree(k, v, l, r) => BlackTree(k, v, l, r) + case t => t + } + private def mkTree[B](isBlack: Boolean, k: A, v: B, l: Tree[B], r: Tree[B]) = + if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) + + abstract class Tree[+B] extends Serializable { + def isEmpty: Boolean + def isBlack: Boolean + def lookup(x: A): Tree[B] + def update[B1 >: B](k: A, v: B1): Tree[B1] = blacken(upd(k, v)) + def delete(k: A): Tree[B] = blacken(del(k)) + def range(from: Option[A], until: Option[A]): Tree[B] = blacken(rng(from, until)) + def foreach[U](f: (A, B) => U) + def toStream: Stream[(A,B)] + def iterator: Iterator[(A, B)] + def upd[B1 >: B](k: A, v: B1): Tree[B1] + def del(k: A): Tree[B] + def smallest: NonEmpty[B] + def rng(from: Option[A], until: Option[A]): Tree[B] + def first : A + def last : A + def count : Int + } + abstract class NonEmpty[+B] extends Tree[B] with Serializable { + def isEmpty = false + def key: A + def value: B + def left: Tree[B] + def right: Tree[B] + def lookup(k: A): Tree[B] = + if (isSmaller(k, key)) left.lookup(k) + else if (isSmaller(key, k)) right.lookup(k) + else this + private[this] def balanceLeft[B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[B1], d: Tree[B1])/*: NonEmpty[B1]*/ = l match { + case RedTree(y, yv, RedTree(x, xv, a, b), c) => + RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) + case RedTree(x, xv, a, RedTree(y, yv, b, c)) => + RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) + case _ => + mkTree(isBlack, z, zv, l, d) + } + private[this] def balanceRight[B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[B1], r: Tree[B1])/*: NonEmpty[B1]*/ = r match { + case RedTree(z, zv, RedTree(y, yv, b, c), d) => + RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) + case RedTree(y, yv, b, RedTree(z, zv, c, d)) => + RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) + case _ => + mkTree(isBlack, x, xv, a, r) + } + def upd[B1 >: B](k: A, v: B1): Tree[B1] = { + if (isSmaller(k, key)) balanceLeft(isBlack, key, value, left.upd(k, v), right) + else if (isSmaller(key, k)) balanceRight(isBlack, key, value, left, right.upd(k, v)) + else mkTree(isBlack, k, v, left, right) + } + // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees + // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html + def del(k: A): Tree[B] = { + def balance(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + case (RedTree(y, yv, a, b), RedTree(z, zv, c, d)) => + RedTree(x, xv, BlackTree(y, yv, a, b), BlackTree(z, zv, c, d)) + case (RedTree(y, yv, RedTree(z, zv, a, b), c), d) => + RedTree(y, yv, BlackTree(z, zv, a, b), BlackTree(x, xv, c, d)) + case (RedTree(y, yv, a, RedTree(z, zv, b, c)), d) => + RedTree(z, zv, BlackTree(y, yv, a, b), BlackTree(x, xv, c, d)) + case (a, RedTree(y, yv, b, RedTree(z, zv, c, d))) => + RedTree(y, yv, BlackTree(x, xv, a, b), BlackTree(z, zv, c, d)) + case (a, RedTree(y, yv, RedTree(z, zv, b, c), d)) => + RedTree(z, zv, BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) + case (a, b) => + BlackTree(x, xv, a, b) + } + def subl(t: Tree[B]) = t match { + case BlackTree(x, xv, 
a, b) => RedTree(x, xv, a, b) + case _ => sys.error("Defect: invariance violation; expected black, got "+t) + } + def balLeft(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + case (RedTree(y, yv, a, b), c) => + RedTree(x, xv, BlackTree(y, yv, a, b), c) + case (bl, BlackTree(y, yv, a, b)) => + balance(x, xv, bl, RedTree(y, yv, a, b)) + case (bl, RedTree(y, yv, BlackTree(z, zv, a, b), c)) => + RedTree(z, zv, BlackTree(x, xv, bl, a), balance(y, yv, b, subl(c))) + case _ => sys.error("Defect: invariance violation at "+right) + } + def balRight(x: A, xv: B, tl: Tree[B], tr: Tree[B]) = (tl, tr) match { + case (a, RedTree(y, yv, b, c)) => + RedTree(x, xv, a, BlackTree(y, yv, b, c)) + case (BlackTree(y, yv, a, b), bl) => + balance(x, xv, RedTree(y, yv, a, b), bl) + case (RedTree(y, yv, a, BlackTree(z, zv, b, c)), bl) => + RedTree(z, zv, balance(y, yv, subl(a), b), BlackTree(x, xv, c, bl)) + case _ => sys.error("Defect: invariance violation at "+left) + } + def delLeft = left match { + case _: BlackTree[_] => balLeft(key, value, left.del(k), right) + case _ => RedTree(key, value, left.del(k), right) + } + def delRight = right match { + case _: BlackTree[_] => balRight(key, value, left, right.del(k)) + case _ => RedTree(key, value, left, right.del(k)) + } + def append(tl: Tree[B], tr: Tree[B]): Tree[B] = (tl, tr) match { + case (Empty, t) => t + case (t, Empty) => t + case (RedTree(x, xv, a, b), RedTree(y, yv, c, d)) => + append(b, c) match { + case RedTree(z, zv, bb, cc) => RedTree(z, zv, RedTree(x, xv, a, bb), RedTree(y, yv, cc, d)) + case bc => RedTree(x, xv, a, RedTree(y, yv, bc, d)) + } + case (BlackTree(x, xv, a, b), BlackTree(y, yv, c, d)) => + append(b, c) match { + case RedTree(z, zv, bb, cc) => RedTree(z, zv, BlackTree(x, xv, a, bb), BlackTree(y, yv, cc, d)) + case bc => balLeft(x, xv, a, BlackTree(y, yv, bc, d)) + } + case (a, RedTree(x, xv, b, c)) => RedTree(x, xv, append(a, b), c) + case (RedTree(x, xv, a, b), c) => RedTree(x, xv, a, append(b, c)) + } + // RedBlack is neither A : Ordering[A], nor A <% Ordered[A] + k match { + case _ if isSmaller(k, key) => delLeft + case _ if isSmaller(key, k) => delRight + case _ => append(left, right) + } + } + + def smallest: NonEmpty[B] = if (left.isEmpty) this else left.smallest + + def toStream: Stream[(A,B)] = + left.toStream ++ Stream((key,value)) ++ right.toStream + + def iterator: Iterator[(A, B)] = + left.iterator ++ Iterator.single(Pair(key, value)) ++ right.iterator + + def foreach[U](f: (A, B) => U) { + left foreach f + f(key, value) + right foreach f + } + + override def rng(from: Option[A], until: Option[A]): Tree[B] = { + if (from == None && until == None) return this + if (from != None && isSmaller(key, from.get)) return right.rng(from, until); + if (until != None && (isSmaller(until.get,key) || !isSmaller(key,until.get))) + return left.rng(from, until); + val newLeft = left.rng(from, None) + val newRight = right.rng(None, until) + if ((newLeft eq left) && (newRight eq right)) this + else if (newLeft eq Empty) newRight.upd(key, value); + else if (newRight eq Empty) newLeft.upd(key, value); + else rebalance(newLeft, newRight) + } + + // The zipper returned might have been traversed left-most (always the left child) + // or right-most (always the right child). Left trees are traversed right-most, + // and right trees are traversed leftmost. 
+ + // Returns the zipper for the side with deepest black nodes depth, a flag + // indicating whether the trees were unbalanced at all, and a flag indicating + // whether the zipper was traversed left-most or right-most. + + // If the trees were balanced, returns an empty zipper + private[this] def compareDepth(left: Tree[B], right: Tree[B]): (List[NonEmpty[B]], Boolean, Boolean, Int) = { + // Once a side is found to be deeper, unzip it to the bottom + def unzip(zipper: List[NonEmpty[B]], leftMost: Boolean): List[NonEmpty[B]] = { + val next = if (leftMost) zipper.head.left else zipper.head.right + next match { + case node: NonEmpty[_] => unzip(node :: zipper, leftMost) + case Empty => zipper + } + } + + // Unzip left tree on the rightmost side and right tree on the leftmost side until one is + // found to be deeper, or the bottom is reached + def unzipBoth(left: Tree[B], + right: Tree[B], + leftZipper: List[NonEmpty[B]], + rightZipper: List[NonEmpty[B]], + smallerDepth: Int): (List[NonEmpty[B]], Boolean, Boolean, Int) = (left, right) match { + case (l @ BlackTree(_, _, _, _), r @ BlackTree(_, _, _, _)) => + unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth + 1) + case (l @ RedTree(_, _, _, _), r @ RedTree(_, _, _, _)) => + unzipBoth(l.right, r.left, l :: leftZipper, r :: rightZipper, smallerDepth) + case (_, r @ RedTree(_, _, _, _)) => + unzipBoth(left, r.left, leftZipper, r :: rightZipper, smallerDepth) + case (l @ RedTree(_, _, _, _), _) => + unzipBoth(l.right, right, l :: leftZipper, rightZipper, smallerDepth) + case (Empty, Empty) => + (Nil, true, false, smallerDepth) + case (Empty, r @ BlackTree(_, _, _, _)) => + val leftMost = true + (unzip(r :: rightZipper, leftMost), false, leftMost, smallerDepth) + case (l @ BlackTree(_, _, _, _), Empty) => + val leftMost = false + (unzip(l :: leftZipper, leftMost), false, leftMost, smallerDepth) + } + unzipBoth(left, right, Nil, Nil, 0) + } + + private[this] def rebalance(newLeft: Tree[B], newRight: Tree[B]) = { + // This is like drop(n-1), but only counting black nodes + def findDepth(zipper: List[NonEmpty[B]], depth: Int): List[NonEmpty[B]] = zipper match { + case BlackTree(_, _, _, _) :: tail => + if (depth == 1) zipper else findDepth(tail, depth - 1) + case _ :: tail => findDepth(tail, depth) + case Nil => sys.error("Defect: unexpected empty zipper while computing range") + } + + // Blackening the smaller tree avoids balancing problems on union; + // this can't be done later, though, or it would change the result of compareDepth + val blkNewLeft = blacken(newLeft) + val blkNewRight = blacken(newRight) + val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) + + if (levelled) { + BlackTree(key, value, blkNewLeft, blkNewRight) + } else { + val zipFrom = findDepth(zipper, smallerDepth) + val union = if (leftMost) { + RedTree(key, value, blkNewLeft, zipFrom.head) + } else { + RedTree(key, value, zipFrom.head, blkNewRight) + } + val zippedTree = zipFrom.tail.foldLeft(union: Tree[B]) { (tree, node) => + if (leftMost) + balanceLeft(node.isBlack, node.key, node.value, tree, node.right) + else + balanceRight(node.isBlack, node.key, node.value, node.left, tree) + } + zippedTree + } + } + def first = if (left .isEmpty) key else left.first + def last = if (right.isEmpty) key else right.last + def count = 1 + left.count + right.count + } + case object Empty extends Tree[Nothing] { + def isEmpty = true + def isBlack = true + def lookup(k: A): Tree[Nothing] = this + def upd[B](k: A, v: B): Tree[B] = 
RedTree(k, v, Empty, Empty) + def del(k: A): Tree[Nothing] = this + def smallest: NonEmpty[Nothing] = throw new NoSuchElementException("empty map") + def iterator: Iterator[(A, Nothing)] = Iterator.empty + def toStream: Stream[(A,Nothing)] = Stream.empty + + def foreach[U](f: (A, Nothing) => U) {} + + def rng(from: Option[A], until: Option[A]) = this + def first = throw new NoSuchElementException("empty map") + def last = throw new NoSuchElementException("empty map") + def count = 0 + } + case class RedTree[+B](override val key: A, + override val value: B, + override val left: Tree[B], + override val right: Tree[B]) extends NonEmpty[B] { + def isBlack = false + } + case class BlackTree[+B](override val key: A, + override val value: B, + override val left: Tree[B], + override val right: Tree[B]) extends NonEmpty[B] { + def isBlack = true } } diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 65e42ad061..50244ef21d 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -45,7 +45,7 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -class TreeMap[A, +B] private (tree: RedBlack.Tree[A, B])(implicit val ordering: Ordering[A]) +class TreeMap[A, +B] private (tree: RedBlack.Node[A, B])(implicit val ordering: Ordering[A]) extends SortedMap[A, B] with SortedMapLike[A, B, TreeMap[A, B]] with MapLike[A, B, TreeMap[A, B]] diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index f7ceafdf8f..899ef0e5eb 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -47,7 +47,7 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @define willNotTerminateInf */ @SerialVersionUID(-5685982407650748405L) -class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: Ordering[A]) +class TreeSet[A] private (tree: RedBlack.Node[A, Unit])(implicit val ordering: Ordering[A]) extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { import immutable.{RedBlack => RB} @@ -105,7 +105,7 @@ class TreeSet[A] private (tree: RedBlack.Tree[A, Unit])(implicit val ordering: O def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - private def newSet(t: RedBlack.Tree[A, Unit]) = new TreeSet[A](t) + private def newSet(t: RedBlack.Node[A, Unit]) = new TreeSet[A](t) /** A factory to create empty sets of the same type of keys. 
*/ diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala index 5c52a27e38..83d3ca0c1f 100644 --- a/test/files/scalacheck/redblack.scala +++ b/test/files/scalacheck/redblack.scala @@ -22,14 +22,14 @@ abstract class RedBlackTest extends Properties("RedBlack") { import RedBlack._ - def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) + def nodeAt[A](tree: Node[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) Some(iterator(tree).drop(n).next) else None - def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key + def treeContains[A](tree: Node[String, A], key: String) = iterator(tree).map(_._1) contains key - def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Node[String, Int]] = if (level == 0) { value(null) } else { @@ -42,9 +42,9 @@ abstract class RedBlackTest extends Properties("RedBlack") { right <- mkTree(nextLevel, !isRed, label + "R") } yield { if (isRed) - RedTree(label + "N", 0, left, right) + RedNode(label + "N", 0, left, right) else - BlackTree(label + "N", 0, left, right) + BlackNode(label + "N", 0, left, right) } } @@ -54,10 +54,10 @@ abstract class RedBlackTest extends Properties("RedBlack") { } yield tree type ModifyParm - def genParm(tree: Tree[String, Int]): Gen[ModifyParm] - def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] + def genParm(tree: Node[String, Int]): Gen[ModifyParm] + def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] - def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for { + def genInput: Gen[(Node[String, Int], ModifyParm, Node[String, Int])] = for { tree <- genTree parm <- genParm(tree) } yield (tree, parm, modify(tree, parm)) @@ -68,26 +68,26 @@ trait RedBlackInvariants { import RedBlack._ - def rootIsBlack[A](t: Tree[String, A]) = isBlack(t) + def rootIsBlack[A](t: Node[String, A]) = isBlack(t) - def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match { + def areAllLeavesBlack[A](t: Node[String, A]): Boolean = t match { case null => isBlack(t) case ne => List(ne.left, ne.right) forall areAllLeavesBlack } - def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match { - case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t)) - case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack + def areRedNodeChildrenBlack[A](t: Node[String, A]): Boolean = t match { + case RedNode(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t)) + case BlackNode(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack case null => true } - def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match { + def blackNodesToLeaves[A](t: Node[String, A]): List[Int] = t match { case null => List(1) - case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) - case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves + case BlackNode(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) + case RedNode(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves } - def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match { + def areBlackNodesToLeavesEqual[A](t: Node[String, 
A]): Boolean = t match { case null => true case ne => ( @@ -97,10 +97,10 @@ trait RedBlackInvariants { ) } - def orderIsPreserved[A](t: Tree[String, A]): Boolean = + def orderIsPreserved[A](t: Node[String, A]): Boolean = iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 } - def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => + def setup(invariant: Node[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => invariant(newTree) } @@ -115,10 +115,10 @@ object TestInsert extends RedBlackTest with RedBlackInvariants { import RedBlack._ override type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0) + override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1) + override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = update(tree, generateKey(tree, parm), 0) - def generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { + def generateKey(tree: Node[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { case Some((key, _)) => key.init.mkString + "MN" case None => nodeAt(tree, parm - 1) match { case Some((key, _)) => key.init.mkString + "RN" @@ -137,8 +137,8 @@ object TestModify extends RedBlackTest { def newValue = 1 override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = nodeAt(tree, parm) map { case (key, _) => update(tree, key, newValue) } getOrElse tree @@ -154,8 +154,8 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = nodeAt(tree, parm) map { case (key, _) => delete(tree, key) } getOrElse tree @@ -170,14 +170,14 @@ object TestRange extends RedBlackTest with RedBlackInvariants { import RedBlack._ override type ModifyParm = (Option[Int], Option[Int]) - override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { + override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = for { from <- choose(0, iterator(tree).size) to <- choose(0, iterator(tree).size) suchThat (from <=) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug } yield (optionalFrom, optionalTo) - override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { + override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap 
(nodeAt(tree, _) map (_._1)) range(tree, from, to) -- cgit v1.2.3 From 288874d80856317744c582f1468d7af420d9e0ee Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 7 Jan 2012 15:26:40 +0100 Subject: Renamed object RedBlack to RedBlackTree. This more clearly separates the new implementation from the now deprecated abstract class RedBlack and avoids naming conflicts for the member classes. --- .../scala/collection/immutable/RedBlack.scala | 406 -------------------- .../scala/collection/immutable/RedBlackTree.scala | 416 +++++++++++++++++++++ .../scala/collection/immutable/TreeMap.scala | 5 +- .../scala/collection/immutable/TreeSet.scala | 7 +- test/files/scalacheck/redblack.scala | 113 +++--- test/files/scalacheck/redblacktree.scala | 212 +++++++++++ 6 files changed, 690 insertions(+), 469 deletions(-) create mode 100644 src/library/scala/collection/immutable/RedBlackTree.scala create mode 100644 test/files/scalacheck/redblacktree.scala diff --git a/src/library/scala/collection/immutable/RedBlack.scala b/src/library/scala/collection/immutable/RedBlack.scala index 37ff7a7f54..83eeaa45ee 100644 --- a/src/library/scala/collection/immutable/RedBlack.scala +++ b/src/library/scala/collection/immutable/RedBlack.scala @@ -11,412 +11,6 @@ package scala.collection package immutable -import annotation.tailrec -import annotation.meta.getter - -/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. - * - * Implementation note: since efficiency is important for data structures this implementation - * uses null to represent empty trees. This also means pattern matching cannot - * easily be used. The API represented by the RedBlack object tries to hide these optimizations - * behind a reasonably clean API. - * - * @since 2.3 - */ -private[immutable] -object RedBlack { - - def isBlack(tree: Node[_, _]) = (tree eq null) || isBlackNode(tree) - def isRedNode(tree: Node[_, _]) = tree.isInstanceOf[RedNode[_, _]] - def isBlackNode(tree: Node[_, _]) = tree.isInstanceOf[BlackNode[_, _]] - - def isEmpty(tree: Node[_, _]): Boolean = tree eq null - - def contains[A](tree: Node[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null - def get[A, B](tree: Node[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { - case null => None - case tree => Some(tree.value) - } - - @tailrec - def lookup[A, B](tree: Node[A, B], x: A)(implicit ordering: Ordering[A]): Node[A, B] = if (tree eq null) null else { - val cmp = ordering.compare(x, tree.key) - if (cmp < 0) lookup(tree.left, x) - else if (cmp > 0) lookup(tree.right, x) - else tree - } - - def count(tree: Node[_, _]) = if (tree eq null) 0 else tree.count - def update[A, B, B1 >: B](tree: Node[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Node[A, B1] = blacken(upd(tree, k, v)) - def delete[A, B](tree: Node[A, B], k: A)(implicit ordering: Ordering[A]): Node[A, B] = blacken(del(tree, k)) - def range[A, B](tree: Node[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Node[A, B] = blacken(rng(tree, from, until)) - - def smallest[A, B](tree: Node[A, B]): Node[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty map") - var result = tree - while (result.left ne null) result = result.left - result - } - def greatest[A, B](tree: Node[A, B]): Node[A, B] = { - if (tree eq null) throw new NoSuchElementException("empty map") - var result = tree - while (result.right ne null) result = result.right - result - } - - def foreach[A, B, U](tree: 
Node[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { - if (tree.left ne null) foreach(tree.left, f) - f((tree.key, tree.value)) - if (tree.right ne null) foreach(tree.right, f) - } - def foreachKey[A, U](tree: Node[A, _], f: A => U): Unit = if (tree ne null) { - if (tree.left ne null) foreachKey(tree.left, f) - f(tree.key) - if (tree.right ne null) foreachKey(tree.right, f) - } - - def iterator[A, B](tree: Node[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) - def keysIterator[A, _](tree: Node[A, _]): Iterator[A] = new KeysIterator(tree) - def valuesIterator[_, B](tree: Node[_, B]): Iterator[B] = new ValuesIterator(tree) - - @tailrec - def nth[A, B](tree: Node[A, B], n: Int): Node[A, B] = { - val count = RedBlack.count(tree.left) - if (n < count) nth(tree.left, n) - else if (n > count) nth(tree.right, n - count - 1) - else tree - } - - private def blacken[A, B](t: Node[A, B]): Node[A, B] = if (t eq null) null else t.black - - private def mkNode[A, B](isBlack: Boolean, k: A, v: B, l: Node[A, B], r: Node[A, B]) = - if (isBlack) BlackNode(k, v, l, r) else RedNode(k, v, l, r) - - private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Node[A, B1], d: Node[A, B1]): Node[A, B1] = { - if (isRedNode(l) && isRedNode(l.left)) - RedNode(l.key, l.value, BlackNode(l.left.key, l.left.value, l.left.left, l.left.right), BlackNode(z, zv, l.right, d)) - else if (isRedNode(l) && isRedNode(l.right)) - RedNode(l.right.key, l.right.value, BlackNode(l.key, l.value, l.left, l.right.left), BlackNode(z, zv, l.right.right, d)) - else - mkNode(isBlack, z, zv, l, d) - } - private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Node[A, B1], r: Node[A, B1]): Node[A, B1] = { - if (isRedNode(r) && isRedNode(r.left)) - RedNode(r.left.key, r.left.value, BlackNode(x, xv, a, r.left.left), BlackNode(r.key, r.value, r.left.right, r.right)) - else if (isRedNode(r) && isRedNode(r.right)) - RedNode(r.key, r.value, BlackNode(x, xv, a, r.left), BlackNode(r.right.key, r.right.value, r.right.left, r.right.right)) - else - mkNode(isBlack, x, xv, a, r) - } - private[this] def upd[A, B, B1 >: B](tree: Node[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Node[A, B1] = if (tree eq null) { - RedNode(k, v, null, null) - } else { - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) balanceLeft(tree.isBlack, tree.key, tree.value, upd(tree.left, k, v), tree.right) - else if (cmp > 0) balanceRight(tree.isBlack, tree.key, tree.value, tree.left, upd(tree.right, k, v)) - else mkNode(tree.isBlack, k, v, tree.left, tree.right) - } - - // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees - // http://www.cse.unsw.edu.au/~dons/data/RedBlackNode.html - private[this] def del[A, B](tree: Node[A, B], k: A)(implicit ordering: Ordering[A]): Node[A, B] = if (tree eq null) null else { - def balance(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tl)) { - if (isRedNode(tr)) { - RedNode(x, xv, tl.black, tr.black) - } else if (isRedNode(tl.left)) { - RedNode(tl.key, tl.value, tl.left.black, BlackNode(x, xv, tl.right, tr)) - } else if (isRedNode(tl.right)) { - RedNode(tl.right.key, tl.right.value, BlackNode(tl.key, tl.value, tl.left, tl.right.left), BlackNode(x, xv, tl.right.right, tr)) - } else { - BlackNode(x, xv, tl, tr) - } - } else if (isRedNode(tr)) { - if (isRedNode(tr.right)) { - RedNode(tr.key, tr.value, BlackNode(x, xv, tl, tr.left), tr.right.black) - } else if (isRedNode(tr.left)) { - RedNode(tr.left.key, tr.left.value, BlackNode(x, xv, tl, tr.left.left), 
BlackNode(tr.key, tr.value, tr.left.right, tr.right)) - } else { - BlackNode(x, xv, tl, tr) - } - } else { - BlackNode(x, xv, tl, tr) - } - def subl(t: Node[A, B]) = - if (t.isInstanceOf[BlackNode[_, _]]) t.red - else sys.error("Defect: invariance violation; expected black, got "+t) - - def balLeft(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tl)) { - RedNode(x, xv, tl.black, tr) - } else if (isBlackNode(tr)) { - balance(x, xv, tl, tr.red) - } else if (isRedNode(tr) && isBlackNode(tr.left)) { - RedNode(tr.left.key, tr.left.value, BlackNode(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) - } else { - sys.error("Defect: invariance violation") - } - def balRight(x: A, xv: B, tl: Node[A, B], tr: Node[A, B]) = if (isRedNode(tr)) { - RedNode(x, xv, tl, tr.black) - } else if (isBlackNode(tl)) { - balance(x, xv, tl.red, tr) - } else if (isRedNode(tl) && isBlackNode(tl.right)) { - RedNode(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackNode(x, xv, tl.right.right, tr)) - } else { - sys.error("Defect: invariance violation") - } - def delLeft = if (isBlackNode(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedNode(tree.key, tree.value, del(tree.left, k), tree.right) - def delRight = if (isBlackNode(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedNode(tree.key, tree.value, tree.left, del(tree.right, k)) - def append(tl: Node[A, B], tr: Node[A, B]): Node[A, B] = if (tl eq null) { - tr - } else if (tr eq null) { - tl - } else if (isRedNode(tl) && isRedNode(tr)) { - val bc = append(tl.right, tr.left) - if (isRedNode(bc)) { - RedNode(bc.key, bc.value, RedNode(tl.key, tl.value, tl.left, bc.left), RedNode(tr.key, tr.value, bc.right, tr.right)) - } else { - RedNode(tl.key, tl.value, tl.left, RedNode(tr.key, tr.value, bc, tr.right)) - } - } else if (isBlackNode(tl) && isBlackNode(tr)) { - val bc = append(tl.right, tr.left) - if (isRedNode(bc)) { - RedNode(bc.key, bc.value, BlackNode(tl.key, tl.value, tl.left, bc.left), BlackNode(tr.key, tr.value, bc.right, tr.right)) - } else { - balLeft(tl.key, tl.value, tl.left, BlackNode(tr.key, tr.value, bc, tr.right)) - } - } else if (isRedNode(tr)) { - RedNode(tr.key, tr.value, append(tl, tr.left), tr.right) - } else if (isRedNode(tl)) { - RedNode(tl.key, tl.value, tl.left, append(tl.right, tr)) - } else { - sys.error("unmatched tree on append: " + tl + ", " + tr) - } - - val cmp = ordering.compare(k, tree.key) - if (cmp < 0) delLeft - else if (cmp > 0) delRight - else append(tree.left, tree.right) - } - - private[this] def rng[A, B](tree: Node[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Node[A, B] = { - if (tree eq null) return null - if (from == None && until == None) return tree - if (from != None && ordering.lt(tree.key, from.get)) return rng(tree.right, from, until); - if (until != None && ordering.lteq(until.get, tree.key)) return rng(tree.left, from, until); - val newLeft = rng(tree.left, from, None) - val newRight = rng(tree.right, None, until) - if ((newLeft eq tree.left) && (newRight eq tree.right)) tree - else if (newLeft eq null) upd(newRight, tree.key, tree.value); - else if (newRight eq null) upd(newLeft, tree.key, tree.value); - else rebalance(tree, newLeft, newRight) - } - - // The zipper returned might have been traversed left-most (always the left child) - // or right-most (always the right child). 
Left trees are traversed right-most, - // and right trees are traversed leftmost. - - // Returns the zipper for the side with deepest black nodes depth, a flag - // indicating whether the trees were unbalanced at all, and a flag indicating - // whether the zipper was traversed left-most or right-most. - - // If the trees were balanced, returns an empty zipper - private[this] def compareDepth[A, B](left: Node[A, B], right: Node[A, B]): (List[Node[A, B]], Boolean, Boolean, Int) = { - // Once a side is found to be deeper, unzip it to the bottom - def unzip(zipper: List[Node[A, B]], leftMost: Boolean): List[Node[A, B]] = { - val next = if (leftMost) zipper.head.left else zipper.head.right - next match { - case null => zipper - case node => unzip(node :: zipper, leftMost) - } - } - - // Unzip left tree on the rightmost side and right tree on the leftmost side until one is - // found to be deeper, or the bottom is reached - def unzipBoth(left: Node[A, B], - right: Node[A, B], - leftZipper: List[Node[A, B]], - rightZipper: List[Node[A, B]], - smallerDepth: Int): (List[Node[A, B]], Boolean, Boolean, Int) = { - if (isBlackNode(left) && isBlackNode(right)) { - unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1) - } else if (isRedNode(left) && isRedNode(right)) { - unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth) - } else if (isRedNode(right)) { - unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth) - } else if (isRedNode(left)) { - unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth) - } else if ((left eq null) && (right eq null)) { - (Nil, true, false, smallerDepth) - } else if ((left eq null) && isBlackNode(right)) { - val leftMost = true - (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth) - } else if (isBlackNode(left) && (right eq null)) { - val leftMost = false - (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth) - } else { - sys.error("unmatched trees in unzip: " + left + ", " + right) - } - } - unzipBoth(left, right, Nil, Nil, 0) - } - private[this] def rebalance[A, B](tree: Node[A, B], newLeft: Node[A, B], newRight: Node[A, B]) = { - // This is like drop(n-1), but only counting black nodes - def findDepth(zipper: List[Node[A, B]], depth: Int): List[Node[A, B]] = zipper match { - case head :: tail if isBlackNode(head) => - if (depth == 1) zipper else findDepth(tail, depth - 1) - case _ :: tail => findDepth(tail, depth) - case Nil => sys.error("Defect: unexpected empty zipper while computing range") - } - - // Blackening the smaller tree avoids balancing problems on union; - // this can't be done later, though, or it would change the result of compareDepth - val blkNewLeft = blacken(newLeft) - val blkNewRight = blacken(newRight) - val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) - - if (levelled) { - BlackNode(tree.key, tree.value, blkNewLeft, blkNewRight) - } else { - val zipFrom = findDepth(zipper, smallerDepth) - val union = if (leftMost) { - RedNode(tree.key, tree.value, blkNewLeft, zipFrom.head) - } else { - RedNode(tree.key, tree.value, zipFrom.head, blkNewRight) - } - val zippedTree = zipFrom.tail.foldLeft(union: Node[A, B]) { (tree, node) => - if (leftMost) - balanceLeft(node.isBlack, node.key, node.value, tree, node.right) - else - balanceRight(node.isBlack, node.key, node.value, node.left, tree) - } - zippedTree - } - } - - /* - * Forcing direct fields access using the @inline 
annotation helps speed up - * various operations (especially smallest/greatest and update/delete). - * - * Unfortunately the direct field access is not guaranteed to work (but - * works on the current implementation of the Scala compiler). - * - * An alternative is to implement the these classes using plain old Java code... - */ - sealed abstract class Node[A, +B]( - @(inline @getter) final val key: A, - @(inline @getter) final val value: B, - @(inline @getter) final val left: Node[A, B], - @(inline @getter) final val right: Node[A, B]) - extends Serializable { - final val count: Int = 1 + RedBlack.count(left) + RedBlack.count(right) - def isBlack: Boolean - def black: Node[A, B] - def red: Node[A, B] - } - final class RedNode[A, +B](key: A, - value: B, - left: Node[A, B], - right: Node[A, B]) extends Node[A, B](key, value, left, right) { - override def isBlack = false - override def black = BlackNode(key, value, left, right) - override def red = this - override def toString = "RedNode(" + key + ", " + value + ", " + left + ", " + right + ")" - } - final class BlackNode[A, +B](key: A, - value: B, - left: Node[A, B], - right: Node[A, B]) extends Node[A, B](key, value, left, right) { - override def isBlack = true - override def black = this - override def red = RedNode(key, value, left, right) - override def toString = "BlackNode(" + key + ", " + value + ", " + left + ", " + right + ")" - } - - object RedNode { - @inline def apply[A, B](key: A, value: B, left: Node[A, B], right: Node[A, B]) = new RedNode(key, value, left, right) - def unapply[A, B](t: RedNode[A, B]) = Some((t.key, t.value, t.left, t.right)) - } - object BlackNode { - @inline def apply[A, B](key: A, value: B, left: Node[A, B], right: Node[A, B]) = new BlackNode(key, value, left, right) - def unapply[A, B](t: BlackNode[A, B]) = Some((t.key, t.value, t.left, t.right)) - } - - private[this] abstract class TreeIterator[A, B, R](tree: Node[A, B]) extends Iterator[R] { - protected[this] def nextResult(tree: Node[A, B]): R - - override def hasNext: Boolean = next ne null - - override def next: R = next match { - case null => - throw new NoSuchElementException("next on empty iterator") - case tree => - next = findNext(tree.right) - nextResult(tree) - } - - @tailrec - private[this] def findNext(tree: Node[A, B]): Node[A, B] = { - if (tree eq null) popPath() - else if (tree.left eq null) tree - else { - pushPath(tree) - findNext(tree.left) - } - } - - private[this] def pushPath(tree: Node[A, B]) { - try { - path(index) = tree - index += 1 - } catch { - case _: ArrayIndexOutOfBoundsException => - /* - * Either the tree became unbalanced or we calculated the maximum height incorrectly. - * To avoid crashing the iterator we expand the path array. Obviously this should never - * happen... - * - * An exception handler is used instead of an if-condition to optimize the normal path. - * This makes a large difference in iteration speed! - */ - assert(index >= path.length) - path :+= null - pushPath(tree) - } - } - private[this] def popPath(): Node[A, B] = if (index == 0) null else { - index -= 1 - path(index) - } - - private[this] var path = if (tree eq null) null else { - /* - * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] - * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. 
- * - * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) - * - * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one. - */ - val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1 - new Array[Node[A, B]](maximumHeight) - } - private[this] var index = 0 - private[this] var next: Node[A, B] = findNext(tree) - } - - private[this] class EntriesIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, (A, B)](tree) { - override def nextResult(tree: Node[A, B]) = (tree.key, tree.value) - } - - private[this] class KeysIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, A](tree) { - override def nextResult(tree: Node[A, B]) = tree.key - } - - private[this] class ValuesIterator[A, B](tree: Node[A, B]) extends TreeIterator[A, B, B](tree) { - override def nextResult(tree: Node[A, B]) = tree.value - } -} - - /** Old base class that was used by previous implementations of `TreeMaps` and `TreeSets`. * * Deprecated due to various performance bugs (see [[https://issues.scala-lang.org/browse/SI-5331 SI-5331]] for more information). diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala new file mode 100644 index 0000000000..ebd88ce3fe --- /dev/null +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -0,0 +1,416 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + + +package scala.collection +package immutable + +import annotation.tailrec +import annotation.meta.getter + +/** An object containing the RedBlack tree implementation used by for `TreeMaps` and `TreeSets`. + * + * Implementation note: since efficiency is important for data structures this implementation + * uses null to represent empty trees. This also means pattern matching cannot + * easily be used. The API represented by the RedBlackTree object tries to hide these + * optimizations behind a reasonably clean API. 
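+ *
+ * A minimal usage sketch of this internal API (names as defined below; the
+ * empty tree is represented by `null`):
+ * {{{
+ *   val empty: Tree[String, Int] = null
+ *   val t = update(update(empty, "a", 1), "b", 2)
+ *   get(t, "a")   // Some(1)
+ *   count(t)      // 2
+ * }}}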
+ * + * @since 2.10 + */ +private[immutable] +object RedBlackTree { + + def isEmpty(tree: Tree[_, _]): Boolean = tree eq null + + def contains[A](tree: Tree[A, _], x: A)(implicit ordering: Ordering[A]): Boolean = lookup(tree, x) ne null + def get[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Option[B] = lookup(tree, x) match { + case null => None + case tree => Some(tree.value) + } + + @tailrec + def lookup[A, B](tree: Tree[A, B], x: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + val cmp = ordering.compare(x, tree.key) + if (cmp < 0) lookup(tree.left, x) + else if (cmp > 0) lookup(tree.right, x) + else tree + } + + def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count + def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) + def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) + def range[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(tree, from, until)) + + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty map") + var result = tree + while (result.left ne null) result = result.left + result + } + def greatest[A, B](tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) throw new NoSuchElementException("empty map") + var result = tree + while (result.right ne null) result = result.right + result + } + + def foreach[A, B, U](tree: Tree[A, B], f: ((A, B)) => U): Unit = if (tree ne null) { + if (tree.left ne null) foreach(tree.left, f) + f((tree.key, tree.value)) + if (tree.right ne null) foreach(tree.right, f) + } + def foreachKey[A, U](tree: Tree[A, _], f: A => U): Unit = if (tree ne null) { + if (tree.left ne null) foreachKey(tree.left, f) + f(tree.key) + if (tree.right ne null) foreachKey(tree.right, f) + } + + def iterator[A, B](tree: Tree[A, B]): Iterator[(A, B)] = new EntriesIterator(tree) + def keysIterator[A, _](tree: Tree[A, _]): Iterator[A] = new KeysIterator(tree) + def valuesIterator[_, B](tree: Tree[_, B]): Iterator[B] = new ValuesIterator(tree) + + @tailrec + def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + val count = RedBlackTree.count(tree.left) + if (n < count) nth(tree.left, n) + else if (n > count) nth(tree.right, n - count - 1) + else tree + } + + def isBlack(tree: Tree[_, _]) = (tree eq null) || isBlackTree(tree) + + private[this] def isRedTree(tree: Tree[_, _]) = tree.isInstanceOf[RedTree[_, _]] + private[this] def isBlackTree(tree: Tree[_, _]) = tree.isInstanceOf[BlackTree[_, _]] + + private[this] def blacken[A, B](t: Tree[A, B]): Tree[A, B] = if (t eq null) null else t.black + + private[this] def mkTree[A, B](isBlack: Boolean, k: A, v: B, l: Tree[A, B], r: Tree[A, B]) = + if (isBlack) BlackTree(k, v, l, r) else RedTree(k, v, l, r) + + private[this] def balanceLeft[A, B, B1 >: B](isBlack: Boolean, z: A, zv: B, l: Tree[A, B1], d: Tree[A, B1]): Tree[A, B1] = { + if (isRedTree(l) && isRedTree(l.left)) + RedTree(l.key, l.value, BlackTree(l.left.key, l.left.value, l.left.left, l.left.right), BlackTree(z, zv, l.right, d)) + else if (isRedTree(l) && isRedTree(l.right)) + RedTree(l.right.key, l.right.value, BlackTree(l.key, l.value, l.left, l.right.left), BlackTree(z, zv, l.right.right, d)) + else + mkTree(isBlack, z, zv, l, d) + } + private[this] def balanceRight[A, B, B1 >: B](isBlack: Boolean, x: A, xv: B, a: Tree[A, B1], r: Tree[A, B1]): 
Tree[A, B1] = { + if (isRedTree(r) && isRedTree(r.left)) + RedTree(r.left.key, r.left.value, BlackTree(x, xv, a, r.left.left), BlackTree(r.key, r.value, r.left.right, r.right)) + else if (isRedTree(r) && isRedTree(r.right)) + RedTree(r.key, r.value, BlackTree(x, xv, a, r.left), BlackTree(r.right.key, r.right.value, r.right.left, r.right.right)) + else + mkTree(isBlack, x, xv, a, r) + } + private[this] def upd[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = if (tree eq null) { + RedTree(k, v, null, null) + } else { + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) balanceLeft(isBlackTree(tree), tree.key, tree.value, upd(tree.left, k, v), tree.right) + else if (cmp > 0) balanceRight(isBlackTree(tree), tree.key, tree.value, tree.left, upd(tree.right, k, v)) + else mkTree(isBlackTree(tree), k, v, tree.left, tree.right) + } + + // Based on Stefan Kahrs' Haskell version of Okasaki's Red&Black Trees + // http://www.cse.unsw.edu.au/~dons/data/RedBlackTree.html + private[this] def del[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = if (tree eq null) null else { + def balance(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + if (isRedTree(tr)) { + RedTree(x, xv, tl.black, tr.black) + } else if (isRedTree(tl.left)) { + RedTree(tl.key, tl.value, tl.left.black, BlackTree(x, xv, tl.right, tr)) + } else if (isRedTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, BlackTree(tl.key, tl.value, tl.left, tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + BlackTree(x, xv, tl, tr) + } + } else if (isRedTree(tr)) { + if (isRedTree(tr.right)) { + RedTree(tr.key, tr.value, BlackTree(x, xv, tl, tr.left), tr.right.black) + } else if (isRedTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), BlackTree(tr.key, tr.value, tr.left.right, tr.right)) + } else { + BlackTree(x, xv, tl, tr) + } + } else { + BlackTree(x, xv, tl, tr) + } + def subl(t: Tree[A, B]) = + if (t.isInstanceOf[BlackTree[_, _]]) t.red + else sys.error("Defect: invariance violation; expected black, got "+t) + + def balLeft(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tl)) { + RedTree(x, xv, tl.black, tr) + } else if (isBlackTree(tr)) { + balance(x, xv, tl, tr.red) + } else if (isRedTree(tr) && isBlackTree(tr.left)) { + RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) + } else { + sys.error("Defect: invariance violation at ") // TODO + } + def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { + RedTree(x, xv, tl, tr.black) + } else if (isBlackTree(tl)) { + balance(x, xv, tl.red, tr) + } else if (isRedTree(tl) && isBlackTree(tl.right)) { + RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) + } else { + sys.error("Defect: invariance violation at ") // TODO + } + def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right) + def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k)) + def append(tl: Tree[A, B], tr: Tree[A, B]): Tree[A, B] = if (tl eq null) { + tr + } else if (tr eq null) { + tl + } else if (isRedTree(tl) && isRedTree(tr)) { + val bc = append(tl.right, tr.left) + if 
(isRedTree(bc)) { + RedTree(bc.key, bc.value, RedTree(tl.key, tl.value, tl.left, bc.left), RedTree(tr.key, tr.value, bc.right, tr.right)) + } else { + RedTree(tl.key, tl.value, tl.left, RedTree(tr.key, tr.value, bc, tr.right)) + } + } else if (isBlackTree(tl) && isBlackTree(tr)) { + val bc = append(tl.right, tr.left) + if (isRedTree(bc)) { + RedTree(bc.key, bc.value, BlackTree(tl.key, tl.value, tl.left, bc.left), BlackTree(tr.key, tr.value, bc.right, tr.right)) + } else { + balLeft(tl.key, tl.value, tl.left, BlackTree(tr.key, tr.value, bc, tr.right)) + } + } else if (isRedTree(tr)) { + RedTree(tr.key, tr.value, append(tl, tr.left), tr.right) + } else if (isRedTree(tl)) { + RedTree(tl.key, tl.value, tl.left, append(tl.right, tr)) + } else { + sys.error("unmatched tree on append: " + tl + ", " + tr) + } + + val cmp = ordering.compare(k, tree.key) + if (cmp < 0) delLeft + else if (cmp > 0) delRight + else append(tree.left, tree.right) + } + + private[this] def rng[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (from == None && until == None) return tree + if (from != None && ordering.lt(tree.key, from.get)) return rng(tree.right, from, until); + if (until != None && ordering.lteq(until.get, tree.key)) return rng(tree.left, from, until); + val newLeft = rng(tree.left, from, None) + val newRight = rng(tree.right, None, until) + if ((newLeft eq tree.left) && (newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value); + else if (newRight eq null) upd(newLeft, tree.key, tree.value); + else rebalance(tree, newLeft, newRight) + } + + // The zipper returned might have been traversed left-most (always the left child) + // or right-most (always the right child). Left trees are traversed right-most, + // and right trees are traversed leftmost. + + // Returns the zipper for the side with deepest black nodes depth, a flag + // indicating whether the trees were unbalanced at all, and a flag indicating + // whether the zipper was traversed left-most or right-most. 
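+  // The Int in the result is smallerDepth: the number of black nodes passed on
+  // the way down the shallower side, which rebalance (via findDepth) uses to
+  // splice the trees back together at matching black height.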
+ + // If the trees were balanced, returns an empty zipper + private[this] def compareDepth[A, B](left: Tree[A, B], right: Tree[A, B]): (List[Tree[A, B]], Boolean, Boolean, Int) = { + // Once a side is found to be deeper, unzip it to the bottom + def unzip(zipper: List[Tree[A, B]], leftMost: Boolean): List[Tree[A, B]] = { + val next = if (leftMost) zipper.head.left else zipper.head.right + next match { + case null => zipper + case node => unzip(node :: zipper, leftMost) + } + } + + // Unzip left tree on the rightmost side and right tree on the leftmost side until one is + // found to be deeper, or the bottom is reached + def unzipBoth(left: Tree[A, B], + right: Tree[A, B], + leftZipper: List[Tree[A, B]], + rightZipper: List[Tree[A, B]], + smallerDepth: Int): (List[Tree[A, B]], Boolean, Boolean, Int) = { + if (isBlackTree(left) && isBlackTree(right)) { + unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth + 1) + } else if (isRedTree(left) && isRedTree(right)) { + unzipBoth(left.right, right.left, left :: leftZipper, right :: rightZipper, smallerDepth) + } else if (isRedTree(right)) { + unzipBoth(left, right.left, leftZipper, right :: rightZipper, smallerDepth) + } else if (isRedTree(left)) { + unzipBoth(left.right, right, left :: leftZipper, rightZipper, smallerDepth) + } else if ((left eq null) && (right eq null)) { + (Nil, true, false, smallerDepth) + } else if ((left eq null) && isBlackTree(right)) { + val leftMost = true + (unzip(right :: rightZipper, leftMost), false, leftMost, smallerDepth) + } else if (isBlackTree(left) && (right eq null)) { + val leftMost = false + (unzip(left :: leftZipper, leftMost), false, leftMost, smallerDepth) + } else { + sys.error("unmatched trees in unzip: " + left + ", " + right) + } + } + unzipBoth(left, right, Nil, Nil, 0) + } + + private[this] def rebalance[A, B](tree: Tree[A, B], newLeft: Tree[A, B], newRight: Tree[A, B]) = { + // This is like drop(n-1), but only counting black nodes + def findDepth(zipper: List[Tree[A, B]], depth: Int): List[Tree[A, B]] = zipper match { + case head :: tail if isBlackTree(head) => + if (depth == 1) zipper else findDepth(tail, depth - 1) + case _ :: tail => findDepth(tail, depth) + case Nil => sys.error("Defect: unexpected empty zipper while computing range") + } + + // Blackening the smaller tree avoids balancing problems on union; + // this can't be done later, though, or it would change the result of compareDepth + val blkNewLeft = blacken(newLeft) + val blkNewRight = blacken(newRight) + val (zipper, levelled, leftMost, smallerDepth) = compareDepth(blkNewLeft, blkNewRight) + + if (levelled) { + BlackTree(tree.key, tree.value, blkNewLeft, blkNewRight) + } else { + val zipFrom = findDepth(zipper, smallerDepth) + val union = if (leftMost) { + RedTree(tree.key, tree.value, blkNewLeft, zipFrom.head) + } else { + RedTree(tree.key, tree.value, zipFrom.head, blkNewRight) + } + val zippedTree = zipFrom.tail.foldLeft(union: Tree[A, B]) { (tree, node) => + if (leftMost) + balanceLeft(isBlackTree(node), node.key, node.value, tree, node.right) + else + balanceRight(isBlackTree(node), node.key, node.value, node.left, tree) + } + zippedTree + } + } + + /* + * Forcing direct fields access using the @inline annotation helps speed up + * various operations (especially smallest/greatest and update/delete). + * + * Unfortunately the direct field access is not guaranteed to work (but + * works on the current implementation of the Scala compiler). 
+ * + * An alternative is to implement the these classes using plain old Java code... + */ + sealed abstract class Tree[A, +B]( + @(inline @getter) final val key: A, + @(inline @getter) final val value: B, + @(inline @getter) final val left: Tree[A, B], + @(inline @getter) final val right: Tree[A, B]) + extends Serializable { + final val count: Int = 1 + RedBlackTree.count(left) + RedBlackTree.count(right) + def black: Tree[A, B] + def red: Tree[A, B] + } + final class RedTree[A, +B](key: A, + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + override def black: Tree[A, B] = BlackTree(key, value, left, right) + override def red: Tree[A, B] = this + override def toString: String = "RedTree(" + key + ", " + value + ", " + left + ", " + right + ")" + } + final class BlackTree[A, +B](key: A, + value: B, + left: Tree[A, B], + right: Tree[A, B]) extends Tree[A, B](key, value, left, right) { + override def black: Tree[A, B] = this + override def red: Tree[A, B] = RedTree(key, value, left, right) + override def toString: String = "BlackTree(" + key + ", " + value + ", " + left + ", " + right + ")" + } + + object RedTree { + @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new RedTree(key, value, left, right) + def unapply[A, B](t: RedTree[A, B]) = Some((t.key, t.value, t.left, t.right)) + } + object BlackTree { + @inline def apply[A, B](key: A, value: B, left: Tree[A, B], right: Tree[A, B]) = new BlackTree(key, value, left, right) + def unapply[A, B](t: BlackTree[A, B]) = Some((t.key, t.value, t.left, t.right)) + } + + private[this] abstract class TreeIterator[A, B, R](tree: Tree[A, B]) extends Iterator[R] { + protected[this] def nextResult(tree: Tree[A, B]): R + + override def hasNext: Boolean = next ne null + + override def next: R = next match { + case null => + throw new NoSuchElementException("next on empty iterator") + case tree => + next = findNext(tree.right) + nextResult(tree) + } + + @tailrec + private[this] def findNext(tree: Tree[A, B]): Tree[A, B] = { + if (tree eq null) popPath() + else if (tree.left eq null) tree + else { + pushPath(tree) + findNext(tree.left) + } + } + + private[this] def pushPath(tree: Tree[A, B]) { + try { + path(index) = tree + index += 1 + } catch { + case _: ArrayIndexOutOfBoundsException => + /* + * Either the tree became unbalanced or we calculated the maximum height incorrectly. + * To avoid crashing the iterator we expand the path array. Obviously this should never + * happen... + * + * An exception handler is used instead of an if-condition to optimize the normal path. + * This makes a large difference in iteration speed! + */ + assert(index >= path.length) + path :+= null + pushPath(tree) + } + } + private[this] def popPath(): Tree[A, B] = if (index == 0) null else { + index -= 1 + path(index) + } + + private[this] var path = if (tree eq null) null else { + /* + * According to "Ralf Hinze. Constructing red-black trees" [http://www.cs.ox.ac.uk/ralf.hinze/publications/#P5] + * the maximum height of a red-black tree is 2*log_2(n + 2) - 2. + * + * According to {@see Integer#numberOfLeadingZeros} ceil(log_2(n)) = (32 - Integer.numberOfLeadingZeros(n - 1)) + * + * We also don't store the deepest nodes in the path so the maximum path length is further reduced by one. 
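+     * For example (illustrative arithmetic): with tree.count == 7,
+     * Integer.numberOfLeadingZeros(7 + 2 - 1) == Integer.numberOfLeadingZeros(8) == 28,
+     * so maximumHeight == 2 * (32 - 28) - 2 - 1 == 5.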
+ */ + val maximumHeight = 2 * (32 - Integer.numberOfLeadingZeros(tree.count + 2 - 1)) - 2 - 1 + new Array[Tree[A, B]](maximumHeight) + } + private[this] var index = 0 + private[this] var next: Tree[A, B] = findNext(tree) + } + + private[this] class EntriesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, (A, B)](tree) { + override def nextResult(tree: Tree[A, B]) = (tree.key, tree.value) + } + + private[this] class KeysIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, A](tree) { + override def nextResult(tree: Tree[A, B]) = tree.key + } + + private[this] class ValuesIterator[A, B](tree: Tree[A, B]) extends TreeIterator[A, B, B](tree) { + override def nextResult(tree: Tree[A, B]) = tree.value + } +} diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 50244ef21d..196c3a9d9d 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -12,6 +12,7 @@ package scala.collection package immutable import generic._ +import immutable.{RedBlackTree => RB} import mutable.Builder import annotation.bridge @@ -45,14 +46,12 @@ object TreeMap extends ImmutableSortedMapFactory[TreeMap] { * @define mayNotTerminateInf * @define willNotTerminateInf */ -class TreeMap[A, +B] private (tree: RedBlack.Node[A, B])(implicit val ordering: Ordering[A]) +class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Ordering[A]) extends SortedMap[A, B] with SortedMapLike[A, B, TreeMap[A, B]] with MapLike[A, B, TreeMap[A, B]] with Serializable { - import immutable.{RedBlack => RB} - @deprecated("use `ordering.lt` instead", "2.10") def isSmaller(x: A, y: A) = ordering.lt(x, y) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 899ef0e5eb..12e2197732 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -12,6 +12,7 @@ package scala.collection package immutable import generic._ +import immutable.{RedBlackTree => RB} import mutable.{ Builder, SetBuilder } /** $factoryInfo @@ -47,11 +48,9 @@ object TreeSet extends ImmutableSortedSetFactory[TreeSet] { * @define willNotTerminateInf */ @SerialVersionUID(-5685982407650748405L) -class TreeSet[A] private (tree: RedBlack.Node[A, Unit])(implicit val ordering: Ordering[A]) +class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Ordering[A]) extends SortedSet[A] with SortedSetLike[A, TreeSet[A]] with Serializable { - import immutable.{RedBlack => RB} - override def stringPrefix = "TreeSet" override def size = RB.count(tree) @@ -105,7 +104,7 @@ class TreeSet[A] private (tree: RedBlack.Node[A, Unit])(implicit val ordering: O def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - private def newSet(t: RedBlack.Node[A, Unit]) = new TreeSet[A](t) + private def newSet(t: RB.Tree[A, Unit]) = new TreeSet[A](t) /** A factory to create empty sets of the same type of keys. */ diff --git a/test/files/scalacheck/redblack.scala b/test/files/scalacheck/redblack.scala index 83d3ca0c1f..bbc6504f58 100644 --- a/test/files/scalacheck/redblack.scala +++ b/test/files/scalacheck/redblack.scala @@ -1,4 +1,3 @@ -import collection.immutable._ import org.scalacheck._ import Prop._ import Gen._ @@ -15,23 +14,26 @@ Both children of every red node are black. Every simple path from a given node to any of its descendant leaves contains the same number of black nodes. 
*/ -package scala.collection.immutable { abstract class RedBlackTest extends Properties("RedBlack") { def minimumSize = 0 def maximumSize = 5 - import RedBlack._ + object RedBlackTest extends scala.collection.immutable.RedBlack[String] { + def isSmaller(x: String, y: String) = x < y + } + + import RedBlackTest._ - def nodeAt[A](tree: Node[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) - Some(iterator(tree).drop(n).next) + def nodeAt[A](tree: Tree[A], n: Int): Option[(String, A)] = if (n < tree.iterator.size && n >= 0) + Some(tree.iterator.drop(n).next) else None - def treeContains[A](tree: Node[String, A], key: String) = iterator(tree).map(_._1) contains key + def treeContains[A](tree: Tree[A], key: String) = tree.iterator.map(_._1) contains key - def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Node[String, Int]] = + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[Int]] = if (level == 0) { - value(null) + value(Empty) } else { for { oddOrEven <- choose(0, 2) @@ -42,9 +44,9 @@ abstract class RedBlackTest extends Properties("RedBlack") { right <- mkTree(nextLevel, !isRed, label + "R") } yield { if (isRed) - RedNode(label + "N", 0, left, right) + RedTree(label + "N", 0, left, right) else - BlackNode(label + "N", 0, left, right) + BlackTree(label + "N", 0, left, right) } } @@ -54,10 +56,10 @@ abstract class RedBlackTest extends Properties("RedBlack") { } yield tree type ModifyParm - def genParm(tree: Node[String, Int]): Gen[ModifyParm] - def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] + def genParm(tree: Tree[Int]): Gen[ModifyParm] + def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] - def genInput: Gen[(Node[String, Int], ModifyParm, Node[String, Int])] = for { + def genInput: Gen[(Tree[Int], ModifyParm, Tree[Int])] = for { tree <- genTree parm <- genParm(tree) } yield (tree, parm, modify(tree, parm)) @@ -66,30 +68,30 @@ abstract class RedBlackTest extends Properties("RedBlack") { trait RedBlackInvariants { self: RedBlackTest => - import RedBlack._ + import RedBlackTest._ - def rootIsBlack[A](t: Node[String, A]) = isBlack(t) + def rootIsBlack[A](t: Tree[A]) = t.isBlack - def areAllLeavesBlack[A](t: Node[String, A]): Boolean = t match { - case null => isBlack(t) - case ne => List(ne.left, ne.right) forall areAllLeavesBlack + def areAllLeavesBlack[A](t: Tree[A]): Boolean = t match { + case Empty => t.isBlack + case ne: NonEmpty[_] => List(ne.left, ne.right) forall areAllLeavesBlack } - def areRedNodeChildrenBlack[A](t: Node[String, A]): Boolean = t match { - case RedNode(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t)) - case BlackNode(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack - case null => true + def areRedNodeChildrenBlack[A](t: Tree[A]): Boolean = t match { + case RedTree(_, _, left, right) => List(left, right) forall (t => t.isBlack && areRedNodeChildrenBlack(t)) + case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack + case Empty => true } - def blackNodesToLeaves[A](t: Node[String, A]): List[Int] = t match { - case null => List(1) - case BlackNode(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) - case RedNode(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves + def blackNodesToLeaves[A](t: Tree[A]): List[Int] = t match { + case Empty => List(1) + case BlackTree(_, _, left, right) => List(left, right) 
flatMap blackNodesToLeaves map (_ + 1) + case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves } - def areBlackNodesToLeavesEqual[A](t: Node[String, A]): Boolean = t match { - case null => true - case ne => + def areBlackNodesToLeavesEqual[A](t: Tree[A]): Boolean = t match { + case Empty => true + case ne: NonEmpty[_] => ( blackNodesToLeaves(ne).distinct.size == 1 && areBlackNodesToLeavesEqual(ne.left) @@ -97,10 +99,10 @@ trait RedBlackInvariants { ) } - def orderIsPreserved[A](t: Node[String, A]): Boolean = - iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 } + def orderIsPreserved[A](t: Tree[A]): Boolean = + t.iterator zip t.iterator.drop(1) forall { case (x, y) => isSmaller(x._1, y._1) } - def setup(invariant: Node[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => + def setup(invariant: Tree[Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => invariant(newTree) } @@ -112,13 +114,13 @@ trait RedBlackInvariants { } object TestInsert extends RedBlackTest with RedBlackInvariants { - import RedBlack._ + import RedBlackTest._ override type ModifyParm = Int - override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1) - override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = update(tree, generateKey(tree, parm), 0) + override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size + 1) + override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = tree update (generateKey(tree, parm), 0) - def generateKey(tree: Node[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { + def generateKey(tree: Tree[Int], parm: ModifyParm): String = nodeAt(tree, parm) match { case Some((key, _)) => key.init.mkString + "MN" case None => nodeAt(tree, parm - 1) match { case Some((key, _)) => key.init.mkString + "RN" @@ -132,31 +134,31 @@ object TestInsert extends RedBlackTest with RedBlackInvariants { } object TestModify extends RedBlackTest { - import RedBlack._ + import RedBlackTest._ def newValue = 1 override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) - override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = nodeAt(tree, parm) map { - case (key, _) => update(tree, key, newValue) + override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) + override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { + case (key, _) => tree update (key, newValue) } getOrElse tree property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) => nodeAt(tree,parm) forall { case (key, _) => - iterator(newTree) contains (key, newValue) + newTree.iterator contains (key, newValue) } } } object TestDelete extends RedBlackTest with RedBlackInvariants { - import RedBlack._ + import RedBlackTest._ override def minimumSize = 1 override type ModifyParm = Int - override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) - override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = nodeAt(tree, parm) map { - case (key, _) => delete(tree, key) + override def genParm(tree: Tree[Int]): Gen[ModifyParm] = choose(0, tree.iterator.size) + override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = nodeAt(tree, parm) map { + case (key, _) => tree delete key } getOrElse tree 
property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) => @@ -167,41 +169,40 @@ object TestDelete extends RedBlackTest with RedBlackInvariants { } object TestRange extends RedBlackTest with RedBlackInvariants { - import RedBlack._ + import RedBlackTest._ override type ModifyParm = (Option[Int], Option[Int]) - override def genParm(tree: Node[String, Int]): Gen[ModifyParm] = for { - from <- choose(0, iterator(tree).size) - to <- choose(0, iterator(tree).size) suchThat (from <=) + override def genParm(tree: Tree[Int]): Gen[ModifyParm] = for { + from <- choose(0, tree.iterator.size) + to <- choose(0, tree.iterator.size) suchThat (from <=) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug } yield (optionalFrom, optionalTo) - override def modify(tree: Node[String, Int], parm: ModifyParm): Node[String, Int] = { + override def modify(tree: Tree[Int], parm: ModifyParm): Tree[Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - range(tree, from, to) + tree range (from, to) } property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - ("lower boundary" |: (from forall ( key => iterator(newTree).map(_._1) forall (key <=)))) && - ("upper boundary" |: (to forall ( key => iterator(newTree).map(_._1) forall (key >)))) + ("lower boundary" |: (from forall ( key => newTree.iterator.map(_._1) forall (key <=)))) && + ("upper boundary" |: (to forall ( key => newTree.iterator.map(_._1) forall (key >)))) } property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - val filteredTree = (iterator(tree) + val filteredTree = (tree.iterator .map(_._1) .filter(key => from forall (key >=)) .filter(key => to forall (key <)) .toList) - filteredTree == iterator(newTree).map(_._1).toList + filteredTree == newTree.iterator.map(_._1).toList } } -} object Test extends Properties("RedBlack") { include(TestInsert) diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala new file mode 100644 index 0000000000..10f3f0fbbf --- /dev/null +++ b/test/files/scalacheck/redblacktree.scala @@ -0,0 +1,212 @@ +import collection.immutable.{RedBlackTree => RB} +import org.scalacheck._ +import Prop._ +import Gen._ + +/* +Properties of a Red & Black Tree: + +A node is either red or black. +The root is black. (This rule is used in some definitions and not others. Since the +root can always be changed from red to black but not necessarily vice-versa this +rule has little effect on analysis.) +All leaves are black. +Both children of every red node are black. +Every simple path from a given node to any of its descendant leaves contains the same number of black nodes. 
+*/ + +package scala.collection.immutable.redblacktree { + abstract class RedBlackTreeTest extends Properties("RedBlackTree") { + def minimumSize = 0 + def maximumSize = 5 + + import RB._ + + def nodeAt[A](tree: Tree[String, A], n: Int): Option[(String, A)] = if (n < iterator(tree).size && n >= 0) + Some(iterator(tree).drop(n).next) + else + None + + def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key + + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = + if (level == 0) { + value(null) + } else { + for { + oddOrEven <- choose(0, 2) + tryRed = oddOrEven.sample.get % 2 == 0 // work around arbitrary[Boolean] bug + isRed = parentIsBlack && tryRed + nextLevel = if (isRed) level else level - 1 + left <- mkTree(nextLevel, !isRed, label + "L") + right <- mkTree(nextLevel, !isRed, label + "R") + } yield { + if (isRed) + RedTree(label + "N", 0, left, right) + else + BlackTree(label + "N", 0, left, right) + } + } + + def genTree = for { + depth <- choose(minimumSize, maximumSize + 1) + tree <- mkTree(depth) + } yield tree + + type ModifyParm + def genParm(tree: Tree[String, Int]): Gen[ModifyParm] + def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] + + def genInput: Gen[(Tree[String, Int], ModifyParm, Tree[String, Int])] = for { + tree <- genTree + parm <- genParm(tree) + } yield (tree, parm, modify(tree, parm)) + } + + trait RedBlackTreeInvariants { + self: RedBlackTreeTest => + + import RB._ + + def rootIsBlack[A](t: Tree[String, A]) = isBlack(t) + + def areAllLeavesBlack[A](t: Tree[String, A]): Boolean = t match { + case null => isBlack(t) + case ne => List(ne.left, ne.right) forall areAllLeavesBlack + } + + def areRedNodeChildrenBlack[A](t: Tree[String, A]): Boolean = t match { + case RedTree(_, _, left, right) => List(left, right) forall (t => isBlack(t) && areRedNodeChildrenBlack(t)) + case BlackTree(_, _, left, right) => List(left, right) forall areRedNodeChildrenBlack + case null => true + } + + def blackNodesToLeaves[A](t: Tree[String, A]): List[Int] = t match { + case null => List(1) + case BlackTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves map (_ + 1) + case RedTree(_, _, left, right) => List(left, right) flatMap blackNodesToLeaves + } + + def areBlackNodesToLeavesEqual[A](t: Tree[String, A]): Boolean = t match { + case null => true + case ne => + ( + blackNodesToLeaves(ne).distinct.size == 1 + && areBlackNodesToLeavesEqual(ne.left) + && areBlackNodesToLeavesEqual(ne.right) + ) + } + + def orderIsPreserved[A](t: Tree[String, A]): Boolean = + iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 } + + def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => + invariant(newTree) + } + + property("root is black") = setup(rootIsBlack) + property("all leaves are black") = setup(areAllLeavesBlack) + property("children of red nodes are black") = setup(areRedNodeChildrenBlack) + property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual) + property("ordering of keys is preserved") = setup(orderIsPreserved) + } + + object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants { + import RB._ + + override type ModifyParm = Int + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size + 1) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = update(tree, generateKey(tree, parm), 0) + + def 
generateKey(tree: Tree[String, Int], parm: ModifyParm): String = nodeAt(tree, parm) match { + case Some((key, _)) => key.init.mkString + "MN" + case None => nodeAt(tree, parm - 1) match { + case Some((key, _)) => key.init.mkString + "RN" + case None => "N" + } + } + + property("update adds elements") = forAll(genInput) { case (tree, parm, newTree) => + treeContains(newTree, generateKey(tree, parm)) + } + } + + object TestModify extends RedBlackTreeTest { + import RB._ + + def newValue = 1 + override def minimumSize = 1 + override type ModifyParm = Int + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + case (key, _) => update(tree, key, newValue) + } getOrElse tree + + property("update modifies values") = forAll(genInput) { case (tree, parm, newTree) => + nodeAt(tree,parm) forall { case (key, _) => + iterator(newTree) contains (key, newValue) + } + } + } + + object TestDelete extends RedBlackTreeTest with RedBlackTreeInvariants { + import RB._ + + override def minimumSize = 1 + override type ModifyParm = Int + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = choose(0, iterator(tree).size) + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = nodeAt(tree, parm) map { + case (key, _) => delete(tree, key) + } getOrElse tree + + property("delete removes elements") = forAll(genInput) { case (tree, parm, newTree) => + nodeAt(tree, parm) forall { case (key, _) => + !treeContains(newTree, key) + } + } + } + + object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants { + import RB._ + + override type ModifyParm = (Option[Int], Option[Int]) + override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { + from <- choose(0, iterator(tree).size) + to <- choose(0, iterator(tree).size) suchThat (from <=) + optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug + optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug + } yield (optionalFrom, optionalTo) + + override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { + val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) + range(tree, from, to) + } + + property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => + val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) + ("lower boundary" |: (from forall ( key => iterator(newTree).map(_._1) forall (key <=)))) && + ("upper boundary" |: (to forall ( key => iterator(newTree).map(_._1) forall (key >)))) + } + + property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => + val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) + val filteredTree = (iterator(tree) + .map(_._1) + .filter(key => from forall (key >=)) + .filter(key => to forall (key <)) + .toList) + filteredTree == iterator(newTree).map(_._1).toList + } + } +} + +object Test extends Properties("RedBlackTree") { + import collection.immutable.redblacktree._ + include(TestInsert) + include(TestModify) + include(TestDelete) + include(TestRange) +} -- cgit v1.2.3 From e61075c4e173d8fad5127e90046f5b91e97c3180 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 7 Jan 2012 19:20:46 +0100 Subject: Tests for 
takeWhile/dropWhile/span. Also simplified implementation of span to just use splitAt. --- src/library/scala/collection/immutable/TreeMap.scala | 5 +---- src/library/scala/collection/immutable/TreeSet.scala | 5 +---- test/files/scalacheck/treemap.scala | 15 +++++++++++++++ test/files/scalacheck/treeset.scala | 15 +++++++++++++++ 4 files changed, 32 insertions(+), 8 deletions(-) diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 196c3a9d9d..2bb8a566c6 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -116,10 +116,7 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi } override def dropWhile(p: ((A, B)) => Boolean) = drop(countWhile(p)) override def takeWhile(p: ((A, B)) => Boolean) = take(countWhile(p)) - override def span(p: ((A, B)) => Boolean) = { - val n = countWhile(p) - (take(n), drop(n)) - } + override def span(p: ((A, B)) => Boolean) = splitAt(countWhile(p)) /** A factory to create empty maps of the same type of keys. */ diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 12e2197732..8b95358d1c 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -94,10 +94,7 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin } override def dropWhile(p: A => Boolean) = drop(countWhile(p)) override def takeWhile(p: A => Boolean) = take(countWhile(p)) - override def span(p: A => Boolean) = { - val n = countWhile(p) - (take(n), drop(n)) - } + override def span(p: A => Boolean) = splitAt(countWhile(p)) @deprecated("use `ordering.lt` instead", "2.10") def isSmaller(x: A, y: A) = compare(x,y) < 0 diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala index 9970bb01aa..7d5f94d58b 100644 --- a/test/files/scalacheck/treemap.scala +++ b/test/files/scalacheck/treemap.scala @@ -96,6 +96,21 @@ object Test extends Properties("TreeMap") { prefix == subject.take(n) && suffix == subject.drop(n) } + property("takeWhile") = forAll { (subject: TreeMap[Int, String]) => + val result = subject.takeWhile(_._1 < 0) + result.forall(_._1 < 0) && result == subject.take(result.size) + } + + property("dropWhile") = forAll { (subject: TreeMap[Int, String]) => + val result = subject.dropWhile(_._1 < 0) + result.forall(_._1 >= 0) && result == subject.takeRight(result.size) + } + + property("span identity") = forAll { (subject: TreeMap[Int, String]) => + val (prefix, suffix) = subject.span(_._1 < 0) + prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix + } + property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { val key = oneOf(subject.keys.toSeq).sample.get val removed = subject - key diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala index 87c3eb7108..e47a1b6cdd 100644 --- a/test/files/scalacheck/treeset.scala +++ b/test/files/scalacheck/treeset.scala @@ -92,6 +92,21 @@ object Test extends Properties("TreeSet") { prefix == subject.take(n) && suffix == subject.drop(n) } + property("takeWhile") = forAll { (subject: TreeMap[Int, String]) => + val result = subject.takeWhile(_._1 < 0) + result.forall(_._1 < 0) && result == subject.take(result.size) + } + + property("dropWhile") = forAll { (subject: TreeMap[Int, String]) => + val result = 
subject.dropWhile(_._1 < 0) + result.forall(_._1 >= 0) && result == subject.takeRight(result.size) + } + + property("span identity") = forAll { (subject: TreeMap[Int, String]) => + val (prefix, suffix) = subject.span(_._1 < 0) + prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix + } + property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { val element = oneOf(subject.toSeq).sample.get val removed = subject - element -- cgit v1.2.3 From 8b3f984d4e2e444c0712a7457aefd159d4024b1f Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 7 Jan 2012 23:31:06 +0100 Subject: Fix silly copy-paste error. --- test/files/scalacheck/treeset.scala | 18 +++++++++--------- 1 file changed, 9 insertions(+), 9 deletions(-) diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala index e47a1b6cdd..7f99aec77e 100644 --- a/test/files/scalacheck/treeset.scala +++ b/test/files/scalacheck/treeset.scala @@ -92,19 +92,19 @@ object Test extends Properties("TreeSet") { prefix == subject.take(n) && suffix == subject.drop(n) } - property("takeWhile") = forAll { (subject: TreeMap[Int, String]) => - val result = subject.takeWhile(_._1 < 0) - result.forall(_._1 < 0) && result == subject.take(result.size) + property("takeWhile") = forAll { (subject: TreeSet[Int]) => + val result = subject.takeWhile(_ < 0) + result.forall(_ < 0) && result == subject.take(result.size) } - property("dropWhile") = forAll { (subject: TreeMap[Int, String]) => - val result = subject.dropWhile(_._1 < 0) - result.forall(_._1 >= 0) && result == subject.takeRight(result.size) + property("dropWhile") = forAll { (subject: TreeSet[Int]) => + val result = subject.dropWhile(_ < 0) + result.forall(_ >= 0) && result == subject.takeRight(result.size) } - property("span identity") = forAll { (subject: TreeMap[Int, String]) => - val (prefix, suffix) = subject.span(_._1 < 0) - prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix + property("span identity") = forAll { (subject: TreeSet[Int]) => + val (prefix, suffix) = subject.span(_ < 0) + prefix.forall(_ < 0) && suffix.forall(_ >= 0) && subject == prefix ++ suffix } property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { -- cgit v1.2.3 From f26f610278887b842de3a4e4fdafb866dd1afb62 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sun, 8 Jan 2012 12:59:45 +0100 Subject: Test for maximum height of red-black tree. 
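The invariant checks height(t) <= 2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2,
an integer upper bound corresponding to the 2*log_2(n + 2) - 2 height limit already cited in
RedBlackTree's iterator. Illustrative arithmetic: for count(t) == 14,
Integer.numberOfLeadingZeros(14 + 2) == 27, so the allowed height is 2 * (32 - 27) - 2 == 8.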
--- test/files/scalacheck/redblacktree.scala | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala index 10f3f0fbbf..34fa8eae8d 100644 --- a/test/files/scalacheck/redblacktree.scala +++ b/test/files/scalacheck/redblacktree.scala @@ -29,6 +29,8 @@ package scala.collection.immutable.redblacktree { def treeContains[A](tree: Tree[String, A], key: String) = iterator(tree).map(_._1) contains key + def height(tree: Tree[_, _]): Int = if (tree eq null) 0 else (1 + math.max(height(tree.left), height(tree.right))) + def mkTree(level: Int, parentIsBlack: Boolean = false, label: String = ""): Gen[Tree[String, Int]] = if (level == 0) { value(null) @@ -100,6 +102,8 @@ package scala.collection.immutable.redblacktree { def orderIsPreserved[A](t: Tree[String, A]): Boolean = iterator(t) zip iterator(t).drop(1) forall { case (x, y) => x._1 < y._1 } + def heightIsBounded(t: Tree[_, _]): Boolean = height(t) <= (2 * (32 - Integer.numberOfLeadingZeros(count(t) + 2)) - 2) + def setup(invariant: Tree[String, Int] => Boolean) = forAll(genInput) { case (tree, parm, newTree) => invariant(newTree) } @@ -109,6 +113,7 @@ package scala.collection.immutable.redblacktree { property("children of red nodes are black") = setup(areRedNodeChildrenBlack) property("black nodes are balanced") = setup(areBlackNodesToLeavesEqual) property("ordering of keys is preserved") = setup(orderIsPreserved) + property("height is bounded") = setup(heightIsBounded) } object TestInsert extends RedBlackTreeTest with RedBlackTreeInvariants { -- cgit v1.2.3 From 8cf889f06cb83f322ff3892175e978c25cd41d43 Mon Sep 17 00:00:00 2001 From: Lucien Pereira Date: Sat, 14 Jan 2012 09:52:41 +0100 Subject: syntactic error correction --- src/library/scala/collection/mutable/TreeSet.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 38fa0c953f..56b4b349cf 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -98,7 +98,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S } /** - * Thanks to the nature immutable of the + * Thanks to the immutable nature of the * underlying AVL Tree, we can share it with * the clone. So clone complexity in time is O(1). * -- cgit v1.2.3 From 00b5cb84df493aace270674054d2f6ddf3721131 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sun, 15 Jan 2012 13:48:00 +0100 Subject: Optimized implementation of TreeMap/TreeSet#to method. Performance of `to` and `until` is now the same. 
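For background: the previous RedBlack.range supported only an inclusive `from` and an exclusive `until`, so an inclusive upper bound (`to`) had no direct tree-level support. Turning the bounds into optional predicates lets a single traversal cover all four inclusive/exclusive combinations: a node whose key falls below the window is replaced by its pruned right subtree, a node above the window by its pruned left subtree, and otherwise only the outer edges keep carrying a bound. A toy sketch of that pruning on a plain unbalanced BST over Int keys (the real code is generic in the Ordering and re-balances the result):

    object RangeSketch {
      sealed trait BST
      case object Tip extends BST
      final case class Bin(key: Int, left: BST, right: BST) extends BST

      // after(k): k lies below the lower bound; before(k): k lies above the upper bound.
      def rng(t: BST, after: Option[Int => Boolean], before: Option[Int => Boolean]): BST = t match {
        case Tip => Tip
        case Bin(k, l, r) =>
          if (after.exists(p => p(k)))       rng(r, after, before)    // k and its left subtree are out
          else if (before.exists(p => p(k))) rng(l, after, before)    // k and its right subtree are out
          else Bin(k, rng(l, after, None), rng(r, None, before))      // real code re-balances here
      }

      // Inclusive upper bound (`to`) versus exclusive upper bound (`until`):
      def to(t: BST, hi: Int): BST    = rng(t, None, Some(k => hi < k))
      def until(t: BST, hi: Int): BST = rng(t, None, Some(k => hi <= k))
    }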
--- .../scala/collection/immutable/RedBlackTree.scala | 18 ++++++++----- .../scala/collection/immutable/TreeMap.scala | 10 ++++--- .../scala/collection/immutable/TreeSet.scala | 6 ++++- test/files/scalacheck/redblacktree.scala | 31 +++++++++++++--------- test/files/scalacheck/treemap.scala | 18 +++++++++++++ test/files/scalacheck/treeset.scala | 18 +++++++++++++ 6 files changed, 77 insertions(+), 24 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index ebd88ce3fe..d8caeab096 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -45,7 +45,11 @@ object RedBlackTree { def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) - def range[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = blacken(rng(tree, from, until)) + def range[A, B](tree: Tree[A, B], low: Option[A], lowInclusive: Boolean, high: Option[A], highInclusive: Boolean)(implicit ordering: Ordering[A]): Tree[A, B] = { + val after: Option[A => Boolean] = low.map(key => if (lowInclusive) ordering.lt(_, key) else ordering.lteq(_, key)) + val before: Option[A => Boolean] = high.map(key => if (highInclusive) ordering.lt(key, _) else ordering.lteq(key, _)) + blacken(rng(tree, after, before)) + } def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { if (tree eq null) throw new NoSuchElementException("empty map") @@ -198,13 +202,13 @@ object RedBlackTree { else append(tree.left, tree.right) } - private[this] def rng[A, B](tree: Tree[A, B], from: Option[A], until: Option[A])(implicit ordering: Ordering[A]): Tree[A, B] = { + private[this] def rng[A, B](tree: Tree[A, B], after: Option[A => Boolean], before: Option[A => Boolean])(implicit ordering: Ordering[A]): Tree[A, B] = { if (tree eq null) return null - if (from == None && until == None) return tree - if (from != None && ordering.lt(tree.key, from.get)) return rng(tree.right, from, until); - if (until != None && ordering.lteq(until.get, tree.key)) return rng(tree.left, from, until); - val newLeft = rng(tree.left, from, None) - val newRight = rng(tree.right, None, until) + if (after == None && before == None) return tree + if (after != None && after.get(tree.key)) return rng(tree.right, after, before); + if (before != None && before.get(tree.key)) return rng(tree.left, after, before); + val newLeft = rng(tree.left, after, None) + val newRight = rng(tree.right, None, before) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree else if (newLeft eq null) upd(newRight, tree.key, tree.value); else if (newRight eq null) upd(newLeft, tree.key, tree.value); diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 2bb8a566c6..3eba64dca3 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -62,9 +62,13 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - override def rangeImpl(from : Option[A], until : Option[A]): TreeMap[A,B] = { - val ntree = RB.range(tree, from,until) - new 
TreeMap[A,B](ntree) + override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = { + val ntree = RB.range(tree, from, true, until, false) + new TreeMap[A, B](ntree) + } + override def to(to: A): TreeMap[A, B] = { + val ntree = RB.range(tree, None, true, Some(to), true) + new TreeMap[A, B](ntree) } override def firstKey = RB.smallest(tree).key diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 8b95358d1c..5dd80e87a4 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -151,7 +151,11 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin override def foreach[U](f: A => U) = RB.foreachKey(tree, f) override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = { - val ntree = RB.range(tree, from, until) + val ntree = RB.range(tree, from, true, until, false) + newSet(ntree) + } + override def to(to: A): TreeSet[A] = { + val ntree = RB.range(tree, None, true, Some(to), true) newSet(ntree) } override def firstKey = head diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala index 34fa8eae8d..14538c2352 100644 --- a/test/files/scalacheck/redblacktree.scala +++ b/test/files/scalacheck/redblacktree.scala @@ -174,36 +174,41 @@ package scala.collection.immutable.redblacktree { object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants { import RB._ - override type ModifyParm = (Option[Int], Option[Int]) + override type ModifyParm = (Option[Int], Boolean, Option[Int], Boolean) override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { from <- choose(0, iterator(tree).size) + fromInclusive <- oneOf(false, true) to <- choose(0, iterator(tree).size) suchThat (from <=) + toInclusive <- oneOf(false, true) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug - } yield (optionalFrom, optionalTo) + } yield (optionalFrom, fromInclusive, optionalTo, toInclusive) override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - range(tree, from, to) + val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) + range(tree, from, parm._2, to, parm._4) } property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - ("lower boundary" |: (from forall ( key => iterator(newTree).map(_._1) forall (key <=)))) && - ("upper boundary" |: (to forall ( key => iterator(newTree).map(_._1) forall (key >)))) + val fromPredicate: String => String => Boolean = if (parm._2) (_ <=) else (_ <) + val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) + val toPredicate: String => String => Boolean = if (parm._4) (_ >=) else (_ >) + ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall fromPredicate(key)))) && + ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall toPredicate(key)))) } property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) - val filteredTree = (iterator(tree) - .map(_._1) - .filter(key => from 
forall (key >=)) - .filter(key => to forall (key <)) + val fromPredicate: String => String => Boolean = if (parm._2) (_ >=) else (_ >) + val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) + val toPredicate: String => String => Boolean = if (parm._4) (_ <=) else (_ <) + val filteredTree = (keysIterator(tree) + .filter(key => from forall fromPredicate(key)) + .filter(key => to forall toPredicate(key)) .toList) - filteredTree == iterator(newTree).map(_._1).toList + filteredTree == keysIterator(newTree).toList } } } diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala index 7d5f94d58b..ba6d117fd4 100644 --- a/test/files/scalacheck/treemap.scala +++ b/test/files/scalacheck/treemap.scala @@ -111,6 +111,24 @@ object Test extends Properties("TreeMap") { prefix.forall(_._1 < 0) && suffix.forall(_._1 >= 0) && subject == prefix ++ suffix } + property("from is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { + val n = choose(0, subject.size - 1).sample.get + val from = subject.drop(n).firstKey + subject.from(from).firstKey == from && subject.from(from).forall(_._1 >= from) + }} + + property("to is inclusive") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { + val n = choose(0, subject.size - 1).sample.get + val to = subject.drop(n).firstKey + subject.to(to).lastKey == to && subject.to(to).forall(_._1 <= to) + }} + + property("until is exclusive") = forAll { (subject: TreeMap[Int, String]) => subject.size > 1 ==> { + val n = choose(1, subject.size - 1).sample.get + val until = subject.drop(n).firstKey + subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_._1 <= until) + }} + property("remove single") = forAll { (subject: TreeMap[Int, String]) => subject.nonEmpty ==> { val key = oneOf(subject.keys.toSeq).sample.get val removed = subject - key diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala index 7f99aec77e..e6d1b50860 100644 --- a/test/files/scalacheck/treeset.scala +++ b/test/files/scalacheck/treeset.scala @@ -107,6 +107,24 @@ object Test extends Properties("TreeSet") { prefix.forall(_ < 0) && suffix.forall(_ >= 0) && subject == prefix ++ suffix } + property("from is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { + val n = choose(0, subject.size - 1).sample.get + val from = subject.drop(n).firstKey + subject.from(from).firstKey == from && subject.from(from).forall(_ >= from) + }} + + property("to is inclusive") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { + val n = choose(0, subject.size - 1).sample.get + val to = subject.drop(n).firstKey + subject.to(to).lastKey == to && subject.to(to).forall(_ <= to) + }} + + property("until is exclusive") = forAll { (subject: TreeSet[Int]) => subject.size > 1 ==> { + val n = choose(1, subject.size - 1).sample.get + val until = subject.drop(n).firstKey + subject.until(until).lastKey == subject.take(n).lastKey && subject.until(until).forall(_ <= until) + }} + property("remove single") = forAll { (subject: TreeSet[Int]) => subject.nonEmpty ==> { val element = oneOf(subject.toSeq).sample.get val removed = subject - element -- cgit v1.2.3 From b0fc4958a53500a329be4831f47e79f64074a5f1 Mon Sep 17 00:00:00 2001 From: Lucien Pereira Date: Sun, 15 Jan 2012 16:40:16 +0100 Subject: Getting rid of closure creation occuring for each rebalancing. Tail recursion is not necessary here. 
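For background: the continuation-passing form did satisfy @tailrec, but every level of the descent allocated a fresh closure whose only purpose was to reassemble the path afterwards. Because an AVL tree is height-balanced, plain recursion needs only O(log n) stack frames, so it is safe in practice and saves one function allocation per visited node. A toy sketch of the two shapes on an unbalanced BST, with rebalancing and duplicate handling left out:

    object InsertSketch {
      sealed trait T
      case object Leaf extends T
      final case class Node(v: Int, l: T, r: T) extends T

      // Continuation-passing, tail-recursive: one closure per level, built only to
      // rebuild the spine on the way back up.
      @annotation.tailrec
      def insertTC(x: Int, t: T, rebuild: T => T): T = t match {
        case Leaf => rebuild(Node(x, Leaf, Leaf))
        case Node(v, l, r) =>
          if (x < v) insertTC(x, l, nl => rebuild(Node(v, nl, r)))
          else       insertTC(x, r, nr => rebuild(Node(v, l, nr)))
      }

      // Plain recursion: no closures; depth stays O(log n) while the tree is balanced.
      def insert(x: Int, t: T): T = t match {
        case Leaf          => Node(x, Leaf, Leaf)
        case Node(v, l, r) => if (x < v) Node(v, insert(x, l), r) else Node(v, l, insert(x, r))
      }
    }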
--- src/library/scala/collection/mutable/AVLTree.scala | 62 ++++++++++------------ 1 file changed, 28 insertions(+), 34 deletions(-) diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index 0cf6cb06e5..f0a6c690b6 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -9,7 +9,6 @@ package scala.collection package mutable -import annotation.tailrec /** * An immutable AVL Tree implementation used by mutable.TreeSet @@ -45,21 +44,16 @@ private[mutable] object AVLTree { * Thows an IllegalArgumentException if element is already present. * */ - def insert[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): AVLTree[A] = { - @tailrec - def insertTC(value: A, tree: AVLTree[A], reassemble: AVLTree[A] => AVLTree[A]): AVLTree[A] = tree match { - case Leaf => reassemble(Node(value, Leaf, Leaf)) - - case Node(a, left, right) => if (0 == ordering.compare(value, a)) { - throw new IllegalArgumentException() - } else if (-1 == ordering.compare(value, a)) { - insertTC(value, left, x => reassemble(rebalance(Node(a, x, right)))) - } else { - insertTC(value, right, x => reassemble(rebalance(Node(a, left, x)))) - } - } + def insert[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): AVLTree[A] = tree match { + case Leaf => Node(value, Leaf, Leaf) - insertTC(value, tree, x => rebalance(x)) + case Node(a, left, right) => if (0 == ordering.compare(value, a)) { + throw new IllegalArgumentException() + } else if (-1 == ordering.compare(value, a)) { + rebalance(Node(a, insert(value, left, ordering), right)) + } else { + rebalance(Node(a, left, insert(value, right, ordering))) + } } def contains[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): Boolean = tree match { @@ -96,7 +90,7 @@ private[mutable] object AVLTree { rebalance(Node(a, left, remove(value, right, ordering))) } - case Node(a, left@Node(_, _, _), right) => if (0 == ordering.compare(value, a)) { + case Node(a, left: Node[A], right) => if (0 == ordering.compare(value, a)) { val (max, newLeft) = removeMax(left) rebalance(Node(max, newLeft, right)) } else if (-1 == ordering.compare(value, a)) { @@ -111,17 +105,17 @@ private[mutable] object AVLTree { * and a new tree from which this element has been extracted. * */ - def removeMax[A](tree: Node[A]): (A, AVLTree[A]) = { - @tailrec - def removeMaxTC(tree: AVLTree[A], assemble: (A, AVLTree[A]) => (A, AVLTree[A])): (A, AVLTree[A]) = tree match { - case Node(a, Leaf, Leaf) => assemble(a, Leaf) - case Node(a, left, Leaf) => assemble(a, left) - case Node(a, left, right) => removeMaxTC(right, - (max: A, avl: AVLTree[A]) => assemble(max, rebalance(Node(a, left, avl)))) - case Leaf => sys.error("Should not happen.") + def removeMax[A](tree: AVLTree[A]): (A, AVLTree[A]) = tree match { + case Node(a, Leaf, Leaf) => (a, Leaf) + + case Node(a, left, Leaf) => (a, left) + + case Node(a, left, right) => { + val (max, newRight) = removeMax(right) + (max, rebalance(Node(a, left, newRight))) } - removeMaxTC(tree, (a, b) => (a, b)) + case Leaf => sys.error("Should not happen.") } /** @@ -129,17 +123,17 @@ private[mutable] object AVLTree { * and a new tree from which this element has been extracted. 
* */ - def removeMin[A](tree: Node[A]): (A, AVLTree[A]) = { - @tailrec - def removeMinTC(tree: AVLTree[A], assemble: (A, AVLTree[A]) => (A, AVLTree[A])): (A, AVLTree[A]) = tree match { - case Node(a, Leaf, Leaf) => assemble(a, Leaf) - case Node(a, Leaf, right) => assemble(a, right) - case Node(a, left, right) => removeMinTC(left, - (min: A, avl: AVLTree[A]) => assemble(min, rebalance(Node(a, avl, right)))) - case Leaf => sys.error("Should not happen.") + def removeMin[A](tree: AVLTree[A]): (A, AVLTree[A]) = tree match { + case Node(a, Leaf, Leaf) => (a, Leaf) + + case Node(a, Leaf, right) => (a, right) + + case Node(a, left, right) => { + val (min, newLeft) = removeMin(left) + (min, rebalance(Node(a, newLeft, right))) } - removeMinTC(tree, (a, b) => (a, b)) + case Leaf => sys.error("Should not happen.") } /** -- cgit v1.2.3 From 7824dbd3cfe6704ab56aa5ceb2af2f5f4e55cbc7 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sat, 21 Jan 2012 22:55:59 +0100 Subject: Custom coded version of range/from/to/until. This avoids unnecessary allocation of Option and Function objects, mostly helping performance of small trees. --- .../scala/collection/immutable/RedBlackTree.scala | 48 +++++++++++++++++----- .../scala/collection/immutable/TreeMap.scala | 13 +++--- .../scala/collection/immutable/TreeSet.scala | 14 +++---- test/files/scalacheck/redblacktree.scala | 26 +++++------- 4 files changed, 59 insertions(+), 42 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index d8caeab096..7110ca4194 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -45,11 +45,16 @@ object RedBlackTree { def count(tree: Tree[_, _]) = if (tree eq null) 0 else tree.count def update[A, B, B1 >: B](tree: Tree[A, B], k: A, v: B1)(implicit ordering: Ordering[A]): Tree[A, B1] = blacken(upd(tree, k, v)) def delete[A, B](tree: Tree[A, B], k: A)(implicit ordering: Ordering[A]): Tree[A, B] = blacken(del(tree, k)) - def range[A, B](tree: Tree[A, B], low: Option[A], lowInclusive: Boolean, high: Option[A], highInclusive: Boolean)(implicit ordering: Ordering[A]): Tree[A, B] = { - val after: Option[A => Boolean] = low.map(key => if (lowInclusive) ordering.lt(_, key) else ordering.lteq(_, key)) - val before: Option[A => Boolean] = high.map(key => if (highInclusive) ordering.lt(key, _) else ordering.lteq(key, _)) - blacken(rng(tree, after, before)) + def rangeImpl[A: Ordering, B](tree: Tree[A, B], from: Option[A], until: Option[A]): Tree[A, B] = (from, until) match { + case (Some(from), Some(until)) => this.range(tree, from, until) + case (Some(from), None) => this.from(tree, from) + case (None, Some(until)) => this.until(tree, until) + case (None, None) => tree } + def range[A: Ordering, B](tree: Tree[A, B], from: A, until: A): Tree[A, B] = blacken(doRange(tree, from, until)) + def from[A: Ordering, B](tree: Tree[A, B], from: A): Tree[A, B] = blacken(doFrom(tree, from)) + def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) + def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { if (tree eq null) throw new NoSuchElementException("empty map") @@ -202,13 +207,36 @@ object RedBlackTree { else append(tree.left, tree.right) } - private[this] def rng[A, B](tree: Tree[A, B], after: Option[A => Boolean], before: Option[A => Boolean])(implicit 
ordering: Ordering[A]): Tree[A, B] = { + private[this] def doFrom[A, B](tree: Tree[A, B], from: A)(implicit ordering: Ordering[A]): Tree[A, B] = { if (tree eq null) return null - if (after == None && before == None) return tree - if (after != None && after.get(tree.key)) return rng(tree.right, after, before); - if (before != None && before.get(tree.key)) return rng(tree.left, after, before); - val newLeft = rng(tree.left, after, None) - val newRight = rng(tree.right, None, before) + if (ordering.lt(tree.key, from)) return doFrom(tree.right, from) + val newLeft = doFrom(tree.left, from) + if (newLeft eq tree.left) tree + else if (newLeft eq null) upd(tree.right, tree.key, tree.value) + else rebalance(tree, newLeft, tree.right) + } + private[this] def doTo[A, B](tree: Tree[A, B], to: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(to, tree.key)) return doTo(tree.left, to) + val newRight = doTo(tree.right, to) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else rebalance(tree, tree.left, newRight) + } + private[this] def doUntil[A, B](tree: Tree[A, B], until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lteq(until, tree.key)) return doUntil(tree.left, until) + val newRight = doUntil(tree.right, until) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else rebalance(tree, tree.left, newRight) + } + private[this] def doRange[A, B](tree: Tree[A, B], from: A, until: A)(implicit ordering: Ordering[A]): Tree[A, B] = { + if (tree eq null) return null + if (ordering.lt(tree.key, from)) return doRange(tree.right, from, until); + if (ordering.lteq(until, tree.key)) return doRange(tree.left, from, until); + val newLeft = doFrom(tree.left, from) + val newRight = doUntil(tree.right, until) if ((newLeft eq tree.left) && (newRight eq tree.right)) tree else if (newLeft eq null) upd(newRight, tree.key, tree.value); else if (newRight eq null) upd(newLeft, tree.key, tree.value); diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index 3eba64dca3..a24221decc 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -62,14 +62,11 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi def this()(implicit ordering: Ordering[A]) = this(null)(ordering) - override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = { - val ntree = RB.range(tree, from, true, until, false) - new TreeMap[A, B](ntree) - } - override def to(to: A): TreeMap[A, B] = { - val ntree = RB.range(tree, None, true, Some(to), true) - new TreeMap[A, B](ntree) - } + override def rangeImpl(from: Option[A], until: Option[A]): TreeMap[A, B] = new TreeMap[A, B](RB.rangeImpl(tree, from, until)) + override def range(from: A, until: A): TreeMap[A, B] = new TreeMap[A, B](RB.range(tree, from, until)) + override def from(from: A): TreeMap[A, B] = new TreeMap[A, B](RB.from(tree, from)) + override def to(to: A): TreeMap[A, B] = new TreeMap[A, B](RB.to(tree, to)) + override def until(until: A): TreeMap[A, B] = new TreeMap[A, B](RB.until(tree, until)) override def firstKey = RB.smallest(tree).key override def lastKey = RB.greatest(tree).key diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index 
5dd80e87a4..e21aec362c 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -150,14 +150,12 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin override def foreach[U](f: A => U) = RB.foreachKey(tree, f) - override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = { - val ntree = RB.range(tree, from, true, until, false) - newSet(ntree) - } - override def to(to: A): TreeSet[A] = { - val ntree = RB.range(tree, None, true, Some(to), true) - newSet(ntree) - } + override def rangeImpl(from: Option[A], until: Option[A]): TreeSet[A] = newSet(RB.rangeImpl(tree, from, until)) + override def range(from: A, until: A): TreeSet[A] = newSet(RB.range(tree, from, until)) + override def from(from: A): TreeSet[A] = newSet(RB.from(tree, from)) + override def to(to: A): TreeSet[A] = newSet(RB.to(tree, to)) + override def until(until: A): TreeSet[A] = newSet(RB.until(tree, until)) + override def firstKey = head override def lastKey = last } diff --git a/test/files/scalacheck/redblacktree.scala b/test/files/scalacheck/redblacktree.scala index 14538c2352..e4b356c889 100644 --- a/test/files/scalacheck/redblacktree.scala +++ b/test/files/scalacheck/redblacktree.scala @@ -174,39 +174,33 @@ package scala.collection.immutable.redblacktree { object TestRange extends RedBlackTreeTest with RedBlackTreeInvariants { import RB._ - override type ModifyParm = (Option[Int], Boolean, Option[Int], Boolean) + override type ModifyParm = (Option[Int], Option[Int]) override def genParm(tree: Tree[String, Int]): Gen[ModifyParm] = for { from <- choose(0, iterator(tree).size) - fromInclusive <- oneOf(false, true) to <- choose(0, iterator(tree).size) suchThat (from <=) - toInclusive <- oneOf(false, true) optionalFrom <- oneOf(Some(from), None, Some(from)) // Double Some(n) to get around a bug optionalTo <- oneOf(Some(to), None, Some(to)) // Double Some(n) to get around a bug - } yield (optionalFrom, fromInclusive, optionalTo, toInclusive) + } yield (optionalFrom, optionalTo) override def modify(tree: Tree[String, Int], parm: ModifyParm): Tree[String, Int] = { val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) - range(tree, from, parm._2, to, parm._4) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) + rangeImpl(tree, from, to) } property("range boundaries respected") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val fromPredicate: String => String => Boolean = if (parm._2) (_ <=) else (_ <) - val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) - val toPredicate: String => String => Boolean = if (parm._4) (_ >=) else (_ >) - ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall fromPredicate(key)))) && - ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall toPredicate(key)))) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) + ("lower boundary" |: (from forall ( key => keysIterator(newTree) forall (key <=)))) && + ("upper boundary" |: (to forall ( key => keysIterator(newTree) forall (key >)))) } property("range returns all elements") = forAll(genInput) { case (tree, parm, newTree) => val from = parm._1 flatMap (nodeAt(tree, _) map (_._1)) - val fromPredicate: String => String => Boolean = if (parm._2) (_ >=) else (_ >) - val to = parm._3 flatMap (nodeAt(tree, _) map (_._1)) - val toPredicate: String => String => Boolean = if (parm._4) (_ <=) else (_ 
<) + val to = parm._2 flatMap (nodeAt(tree, _) map (_._1)) val filteredTree = (keysIterator(tree) - .filter(key => from forall fromPredicate(key)) - .filter(key => to forall toPredicate(key)) + .filter(key => from forall (key >=)) + .filter(key => to forall (key <)) .toList) filteredTree == keysIterator(newTree).toList } -- cgit v1.2.3 From 78374f340e71d8e8f71c5bcd11452b72c207068c Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Sun, 22 Jan 2012 21:17:29 +0100 Subject: Custom implementations of drop/take/slice. This mainly helps performance when comparing keys is expensive. --- .../scala/collection/immutable/RedBlackTree.scala | 39 +++++++++++++++++++++- .../scala/collection/immutable/TreeMap.scala | 6 ++-- .../scala/collection/immutable/TreeSet.scala | 6 ++-- test/files/scalacheck/treemap.scala | 18 ++++++++-- test/files/scalacheck/treeset.scala | 18 ++++++++-- 5 files changed, 75 insertions(+), 12 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 7110ca4194..731a0f7975 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -56,6 +56,10 @@ object RedBlackTree { def to[A: Ordering, B](tree: Tree[A, B], to: A): Tree[A, B] = blacken(doTo(tree, to)) def until[A: Ordering, B](tree: Tree[A, B], key: A): Tree[A, B] = blacken(doUntil(tree, key)) + def drop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doDrop(tree, n)) + def take[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = blacken(doTake(tree, n)) + def slice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = blacken(doSlice(tree, from, until)) + def smallest[A, B](tree: Tree[A, B]): Tree[A, B] = { if (tree eq null) throw new NoSuchElementException("empty map") var result = tree @@ -86,7 +90,7 @@ object RedBlackTree { @tailrec def nth[A, B](tree: Tree[A, B], n: Int): Tree[A, B] = { - val count = RedBlackTree.count(tree.left) + val count = this.count(tree.left) if (n < count) nth(tree.left, n) else if (n > count) nth(tree.right, n - count - 1) else tree @@ -243,6 +247,39 @@ object RedBlackTree { else rebalance(tree, newLeft, newRight) } + private[this] def doDrop[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + if (n <= 0) return tree + if (n >= this.count(tree)) return null + val count = this.count(tree.left) + if (n > count) return doDrop(tree.right, n - count - 1) + val newLeft = doDrop(tree.left, n) + if (newLeft eq tree.left) tree + else if (newLeft eq null) upd(tree.right, tree.key, tree.value) + else rebalance(tree, newLeft, tree.right) + } + private[this] def doTake[A: Ordering, B](tree: Tree[A, B], n: Int): Tree[A, B] = { + if (n <= 0) return null + if (n >= this.count(tree)) return tree + val count = this.count(tree.left) + if (n <= count) return doTake(tree.left, n) + val newRight = doTake(tree.right, n - count - 1) + if (newRight eq tree.right) tree + else if (newRight eq null) upd(tree.left, tree.key, tree.value) + else rebalance(tree, tree.left, newRight) + } + private[this] def doSlice[A: Ordering, B](tree: Tree[A, B], from: Int, until: Int): Tree[A, B] = { + if (tree eq null) return null + val count = this.count(tree.left) + if (from > count) return doSlice(tree.right, from - count - 1, until - count - 1) + if (until <= count) return doSlice(tree.left, from, until) + val newLeft = doDrop(tree.left, from) + val newRight = doTake(tree.right, until - count - 1) + if ((newLeft eq tree.left) && 
(newRight eq tree.right)) tree + else if (newLeft eq null) upd(newRight, tree.key, tree.value) + else if (newRight eq null) upd(newLeft, tree.key, tree.value) + else rebalance(tree, newLeft, newRight) + } + // The zipper returned might have been traversed left-most (always the left child) // or right-most (always the right child). Left trees are traversed right-most, // and right trees are traversed leftmost. diff --git a/src/library/scala/collection/immutable/TreeMap.scala b/src/library/scala/collection/immutable/TreeMap.scala index a24221decc..dc4f79be35 100644 --- a/src/library/scala/collection/immutable/TreeMap.scala +++ b/src/library/scala/collection/immutable/TreeMap.scala @@ -89,20 +89,20 @@ class TreeMap[A, +B] private (tree: RB.Tree[A, B])(implicit val ordering: Orderi override def drop(n: Int) = { if (n <= 0) this else if (n >= size) empty - else from(RB.nth(tree, n).key) + else new TreeMap(RB.drop(tree, n)) } override def take(n: Int) = { if (n <= 0) empty else if (n >= size) this - else until(RB.nth(tree, n).key) + else new TreeMap(RB.take(tree, n)) } override def slice(from: Int, until: Int) = { if (until <= from) empty else if (from <= 0) take(until) else if (until >= size) drop(from) - else range(RB.nth(tree, from).key, RB.nth(tree, until).key) + else new TreeMap(RB.slice(tree, from, until)) } override def dropRight(n: Int) = take(size - n) diff --git a/src/library/scala/collection/immutable/TreeSet.scala b/src/library/scala/collection/immutable/TreeSet.scala index e21aec362c..1b3d72ceb7 100644 --- a/src/library/scala/collection/immutable/TreeSet.scala +++ b/src/library/scala/collection/immutable/TreeSet.scala @@ -66,20 +66,20 @@ class TreeSet[A] private (tree: RB.Tree[A, Unit])(implicit val ordering: Orderin override def drop(n: Int) = { if (n <= 0) this else if (n >= size) empty - else from(RB.nth(tree, n).key) + else newSet(RB.drop(tree, n)) } override def take(n: Int) = { if (n <= 0) empty else if (n >= size) this - else until(RB.nth(tree, n).key) + else newSet(RB.take(tree, n)) } override def slice(from: Int, until: Int) = { if (until <= from) empty else if (from <= 0) take(until) else if (until >= size) drop(from) - else range(RB.nth(tree, from).key, RB.nth(tree, until).key) + else newSet(RB.slice(tree, from, until)) } override def dropRight(n: Int) = take(size - n) diff --git a/test/files/scalacheck/treemap.scala b/test/files/scalacheck/treemap.scala index ba6d117fd4..f672637c57 100644 --- a/test/files/scalacheck/treemap.scala +++ b/test/files/scalacheck/treemap.scala @@ -7,11 +7,12 @@ import util._ import Buildable._ object Test extends Properties("TreeMap") { - implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary]: Arbitrary[TreeMap[A, B]] = - Arbitrary(for { + def genTreeMap[A: Arbitrary: Ordering, B: Arbitrary]: Gen[TreeMap[A, B]] = + for { keys <- listOf(arbitrary[A]) values <- listOfN(keys.size, arbitrary[B]) - } yield TreeMap(keys zip values: _*)) + } yield TreeMap(keys zip values: _*) + implicit def arbTreeMap[A : Arbitrary : Ordering, B : Arbitrary] = Arbitrary(genTreeMap[A, B]) property("foreach/iterator consistency") = forAll { (subject: TreeMap[Int, String]) => val it = subject.iterator @@ -96,6 +97,17 @@ object Test extends Properties("TreeMap") { prefix == subject.take(n) && suffix == subject.drop(n) } + def genSliceParms = for { + tree <- genTreeMap[Int, String] + from <- choose(0, tree.size) + until <- choose(from, tree.size) + } yield (tree, from, until) + + property("slice") = forAll(genSliceParms) { case (subject, from, until) => + val 
slice = subject.slice(from, until) + slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until) + } + property("takeWhile") = forAll { (subject: TreeMap[Int, String]) => val result = subject.takeWhile(_._1 < 0) result.forall(_._1 < 0) && result == subject.take(result.size) diff --git a/test/files/scalacheck/treeset.scala b/test/files/scalacheck/treeset.scala index e6d1b50860..98e38c8219 100644 --- a/test/files/scalacheck/treeset.scala +++ b/test/files/scalacheck/treeset.scala @@ -6,8 +6,11 @@ import Arbitrary._ import util._ object Test extends Properties("TreeSet") { - implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = - Arbitrary(listOf(arbitrary[A]) map (elements => TreeSet(elements: _*))) + def genTreeSet[A: Arbitrary: Ordering]: Gen[TreeSet[A]] = + for { + elements <- listOf(arbitrary[A]) + } yield TreeSet(elements: _*) + implicit def arbTreeSet[A : Arbitrary : Ordering]: Arbitrary[TreeSet[A]] = Arbitrary(genTreeSet) property("foreach/iterator consistency") = forAll { (subject: TreeSet[Int]) => val it = subject.iterator @@ -92,6 +95,17 @@ object Test extends Properties("TreeSet") { prefix == subject.take(n) && suffix == subject.drop(n) } + def genSliceParms = for { + tree <- genTreeSet[Int] + from <- choose(0, tree.size) + until <- choose(from, tree.size) + } yield (tree, from, until) + + property("slice") = forAll(genSliceParms) { case (subject, from, until) => + val slice = subject.slice(from, until) + slice.size == until - from && subject.toSeq == subject.take(from).toSeq ++ slice ++ subject.drop(until) + } + property("takeWhile") = forAll { (subject: TreeSet[Int]) => val result = subject.takeWhile(_ < 0) result.forall(_ < 0) && result == subject.take(result.size) -- cgit v1.2.3 From af164c5686d5cad04f69594d37f6f72d456546de Mon Sep 17 00:00:00 2001 From: aleksandar Date: Tue, 24 Jan 2012 18:39:20 +0100 Subject: Update for fix for SI-5377. Converting the buffer to another arraybuffer instead of to a list. --- src/library/scala/collection/SeqLike.scala | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/SeqLike.scala b/src/library/scala/collection/SeqLike.scala index 6d84b4276b..02298ef096 100644 --- a/src/library/scala/collection/SeqLike.scala +++ b/src/library/scala/collection/SeqLike.scala @@ -151,8 +151,9 @@ trait SeqLike[+A, +Repr] extends IterableLike[A, Repr] with GenSeqLike[A, Repr] def next(): Repr = { if (!hasNext) Iterator.empty.next - - val result = (self.newBuilder ++= elms.toList).result + + val forcedElms = new mutable.ArrayBuffer[A](elms.size) ++= elms + val result = (self.newBuilder ++= forcedElms).result var i = idxs.length - 2 while(i >= 0 && idxs(i) >= idxs(i+1)) i -= 1 -- cgit v1.2.3 From 51667dc039936975dc554c8d50509ef4f9f3b845 Mon Sep 17 00:00:00 2001 From: Erik Rozendaal Date: Tue, 24 Jan 2012 19:54:38 +0100 Subject: Removed TODOs. 
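A note on the drop/take/slice change above: each red-black node caches the size of its subtree, so drop(n), take(n) and slice can steer by element index alone, comparing integers instead of keys, whereas the earlier nth-based versions located the n-th key and then searched for it again with the Ordering. A toy sketch of count-directed drop on a size-annotated, unbalanced BST (illustrative names; the real code re-balances the rebuilt spine):

    object DropSketch {
      sealed trait T { def size: Int }
      case object Tip extends T { def size = 0 }
      final case class Bin(v: Int, l: T, r: T) extends T { val size = 1 + l.size + r.size }

      // Drop the n smallest elements using only the cached subtree sizes, no key comparisons.
      def dropN(t: T, n: Int): T = t match {
        case Tip              => Tip
        case _ if n <= 0      => t
        case _ if n >= t.size => Tip
        case Bin(v, l, r) =>
          if (n > l.size) dropN(r, n - l.size - 1) // the left subtree and the root are both dropped
          else Bin(v, dropN(l, n), r)              // real code re-balances here
      }
    }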
--- src/library/scala/collection/immutable/RedBlackTree.scala | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/src/library/scala/collection/immutable/RedBlackTree.scala b/src/library/scala/collection/immutable/RedBlackTree.scala index 731a0f7975..0f28c4997b 100644 --- a/src/library/scala/collection/immutable/RedBlackTree.scala +++ b/src/library/scala/collection/immutable/RedBlackTree.scala @@ -166,7 +166,7 @@ object RedBlackTree { } else if (isRedTree(tr) && isBlackTree(tr.left)) { RedTree(tr.left.key, tr.left.value, BlackTree(x, xv, tl, tr.left.left), balance(tr.key, tr.value, tr.left.right, subl(tr.right))) } else { - sys.error("Defect: invariance violation at ") // TODO + sys.error("Defect: invariance violation") } def balRight(x: A, xv: B, tl: Tree[A, B], tr: Tree[A, B]) = if (isRedTree(tr)) { RedTree(x, xv, tl, tr.black) @@ -175,7 +175,7 @@ object RedBlackTree { } else if (isRedTree(tl) && isBlackTree(tl.right)) { RedTree(tl.right.key, tl.right.value, balance(tl.key, tl.value, subl(tl.left), tl.right.left), BlackTree(x, xv, tl.right.right, tr)) } else { - sys.error("Defect: invariance violation at ") // TODO + sys.error("Defect: invariance violation") } def delLeft = if (isBlackTree(tree.left)) balLeft(tree.key, tree.value, del(tree.left, k), tree.right) else RedTree(tree.key, tree.value, del(tree.left, k), tree.right) def delRight = if (isBlackTree(tree.right)) balRight(tree.key, tree.value, tree.left, del(tree.right, k)) else RedTree(tree.key, tree.value, tree.left, del(tree.right, k)) -- cgit v1.2.3 From 0cbe801dc5ab29f484d2e2f531cba1305823dcce Mon Sep 17 00:00:00 2001 From: Leif Wickland Date: Tue, 24 Jan 2012 15:35:52 -0700 Subject: SI-5405: Fix documentation error in scala.math.BigInt --- src/library/scala/math/BigInt.scala | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/library/scala/math/BigInt.scala b/src/library/scala/math/BigInt.scala index 361e02cb16..8a53afaa62 100644 --- a/src/library/scala/math/BigInt.scala +++ b/src/library/scala/math/BigInt.scala @@ -309,7 +309,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo override def byteValue = intValue.toByte /** Converts this BigInt to a short. - * If the BigInt is too big to fit in a byte, only the low-order 16 bits are returned. + * If the BigInt is too big to fit in a short, only the low-order 16 bits are returned. * Note that this conversion can lose information about the overall magnitude of the * BigInt value as well as return a result with the opposite sign. */ @@ -323,7 +323,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo def charValue = intValue.toChar /** Converts this BigInt to an int. - * If the BigInt is too big to fit in a char, only the low-order 32 bits + * If the BigInt is too big to fit in a int, only the low-order 32 bits * are returned. Note that this conversion can lose information about the * overall magnitude of the BigInt value as well as return a result with * the opposite sign. @@ -331,7 +331,7 @@ class BigInt(val bigInteger: BigInteger) extends ScalaNumber with ScalaNumericCo def intValue = this.bigInteger.intValue /** Converts this BigInt to a long. - * If the BigInt is too big to fit in a char, only the low-order 64 bits + * If the BigInt is too big to fit in a long, only the low-order 64 bits * are returned. Note that this conversion can lose information about the * overall magnitude of the BigInt value as well as return a result with * the opposite sign. 
-- cgit v1.2.3 From c800d1fec5241ed8c29e5af30465856f9b583246 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Wed, 25 Jan 2012 11:33:53 +0100 Subject: Use context for buffering errors that cannot/shouldn't be reported in the given moment (instead of throwing type errors). This avoids previous problems where we were creating fake error trees in some incorrect places like in type completers in Namers etc. Implicits relied heavily on type errors being thrown but performance should stay the same due to some explicit checks/returns. Some of the problems involved how ambiguous error messages were collected/reported because it was very random (similarly for divergent implicits). This should be more explicit now. Reduced the number of unnecessary cyclic references being thrown (apart from those in Symbols/Types which don't have a context and need to stay for now as is). Review by @paulp, @odersky. --- src/compiler/scala/reflect/internal/Symbols.scala | 9 +- .../scala/reflect/internal/TreePrinters.scala | 1 - src/compiler/scala/reflect/internal/Trees.scala | 3 + src/compiler/scala/reflect/internal/Types.scala | 47 +- .../scala/tools/nsc/ast/TreeBrowsers.scala | 5 +- src/compiler/scala/tools/nsc/ast/Trees.scala | 6 +- .../scala/tools/nsc/interactive/Global.scala | 2 + src/compiler/scala/tools/nsc/plugins/Plugins.scala | 2 +- .../scala/tools/nsc/symtab/classfile/Pickler.scala | 2 +- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../scala/tools/nsc/typechecker/Analyzer.scala | 1 + .../tools/nsc/typechecker/ContextErrors.scala | 1052 +++++++++++++++++ .../scala/tools/nsc/typechecker/Contexts.scala | 129 ++- .../scala/tools/nsc/typechecker/Implicits.scala | 127 +- .../scala/tools/nsc/typechecker/Infer.scala | 338 ++---- .../scala/tools/nsc/typechecker/Macros.scala | 42 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 15 +- .../scala/tools/nsc/typechecker/Namers.scala | 106 +- .../tools/nsc/typechecker/NamesDefaults.scala | 30 +- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 27 +- .../tools/nsc/typechecker/SuperAccessors.scala | 13 +- .../tools/nsc/typechecker/TypeDiagnostics.scala | 108 +- .../scala/tools/nsc/typechecker/Typers.scala | 1213 ++++++++++---------- src/library/scala/reflect/api/Trees.scala | 4 +- test/files/buildmanager/t2790/t2790.check | 1 - test/files/neg/sensitive2.check | 10 + test/files/neg/sensitive2.scala | 8 + test/files/neg/t1878.check | 5 +- test/files/neg/t2641.check | 20 +- test/files/neg/t2918.check | 6 +- test/files/neg/t2918.scala | 2 +- test/files/neg/t3015.check | 7 +- test/files/neg/t649.check | 2 +- 34 files changed, 2149 insertions(+), 1198 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala create mode 100644 test/files/neg/sensitive2.check create mode 100644 test/files/neg/sensitive2.scala diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index a943b6fe24..6b4080c6ad 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -345,22 +345,26 @@ trait Symbols extends api.Symbols { self: SymbolTable => } // Lock a symbol, using the handler if the recursion depth becomes too great. 
- def lock(handler: => Unit) = { + def lock(handler: => Unit): Boolean = { if ((rawflags & LOCKED) != 0L) { if (settings.Yrecursion.value != 0) { recursionTable get this match { case Some(n) => if (n > settings.Yrecursion.value) { handler + false } else { recursionTable += (this -> (n + 1)) + true } case None => recursionTable += (this -> 1) + true } - } else { handler } + } else { handler; false } } else { rawflags |= LOCKED + true // activeLocks += 1 // lockedSyms += this } @@ -963,7 +967,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => phase = phaseOf(infos.validFrom) tp.complete(this) } finally { - // if (id == 431) println("completer ran "+tp.getClass+" for "+fullName) unlock() phase = current } diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala index dcc395ddd2..3a0717d344 100644 --- a/src/compiler/scala/reflect/internal/TreePrinters.scala +++ b/src/compiler/scala/reflect/internal/TreePrinters.scala @@ -397,7 +397,6 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable => // case SelectFromArray(qualifier, name, _) => // print(qualifier); print("."); print(symName(tree, name)) - case tree => xprintTree(this, tree) } diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index a2c55a89d6..5bb0c98bfb 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -129,6 +129,9 @@ trait Trees extends api.Trees { self: SymbolTable => } def shallowDuplicate: Tree = new ShallowDuplicator(tree) transform tree def shortClass: String = tree.getClass.getName split "[.$]" last + + def isErrorTyped = (tree.tpe ne null) && tree.tpe.isError + /** When you want to know a little more than the class, but a lot * less than the whole tree. */ diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 1df60f32d9..8ca00dd5c5 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1388,7 +1388,7 @@ trait Types extends api.Types { self: SymbolTable => //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG } if (baseTypeSeqCache eq undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + typeSymbol) + throw new RecoverableCyclicReference(typeSymbol) baseTypeSeqCache } @@ -1430,7 +1430,7 @@ trait Types extends api.Types { self: SymbolTable => } } if (baseClassesCache eq null) - throw new TypeError("illegal cyclic reference involving " + typeSymbol) + throw new RecoverableCyclicReference(typeSymbol) baseClassesCache } @@ -1946,7 +1946,7 @@ trait Types extends api.Types { self: SymbolTable => // If a subtyping cycle is not detected here, we'll likely enter an infinite // loop before a sensible error can be issued. SI-5093 is one example. 
case x: SubType if x.supertype eq this => - throw new TypeError("illegal cyclic reference involving " + sym) + throw new RecoverableCyclicReference(sym) case tp => tp } } @@ -2064,7 +2064,7 @@ trait Types extends api.Types { self: SymbolTable => } } if (baseTypeSeqCache == undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + sym) + throw new RecoverableCyclicReference(sym) baseTypeSeqCache } @@ -2074,11 +2074,11 @@ trait Types extends api.Types { self: SymbolTable => else pre.prefixString ) private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]") - private def refinementString = ( + def refinementString = ( if (sym.isStructuralRefinement) ( decls filter (sym => sym.isPossibleInRefinement && sym.isPublic) map (_.defString) - mkString(" {", "; ", "}") + mkString("{", "; ", "}") ) else "" ) @@ -2498,7 +2498,7 @@ trait Types extends api.Types { self: SymbolTable => if (args.isEmpty && params.isEmpty) new TypeVar(origin, constr) else if (args.size == params.size) new AppliedTypeVar(origin, constr, params zip args) else if (args.isEmpty) new HKTypeVar(origin, constr, params) - else throw new TypeError("Invalid TypeVar construction: " + ((origin, constr, args, params))) + else throw new Error("Invalid TypeVar construction: " + ((origin, constr, args, params))) ) trace("create", "In " + tv.originLocation)(tv) @@ -2599,7 +2599,7 @@ trait Types extends api.Types { self: SymbolTable => TypeVar.trace("applyArgs", "In " + originLocation + ", apply args " + newArgs.mkString(", ") + " to " + originName)(tv) } else - throw new TypeError("Invalid type application in TypeVar: " + params + ", " + newArgs) + throw new Error("Invalid type application in TypeVar: " + params + ", " + newArgs) ) // newArgs.length may differ from args.length (could've been empty before) // @@ -3079,7 +3079,7 @@ trait Types extends api.Types { self: SymbolTable => // don't expand cyclical type alias // we require that object is initialized, thus info.typeParams instead of typeParams. 
if (sym1.isAliasType && sameLength(sym1.info.typeParams, args) && !sym1.lockOK) - throw new TypeError("illegal cyclic reference involving " + sym1) + throw new RecoverableCyclicReference(sym1) val pre1 = pre match { case x: SuperType if sym1.isEffectivelyFinal || sym1.isDeferred => @@ -3101,7 +3101,7 @@ trait Types extends api.Types { self: SymbolTable => def copyTypeRef(tp: Type, pre: Type, sym: Symbol, args: List[Type]): Type = tp match { case TypeRef(pre0, sym0, _) if pre == pre0 && sym0.name == sym.name => if (sym.isAliasType && sameLength(sym.info.typeParams, args) && !sym.lockOK) - throw new TypeError("illegal cyclic reference involving " + sym) + throw new RecoverableCyclicReference(sym) TypeRef(pre, sym, args) case _ => @@ -3985,15 +3985,17 @@ trait Types extends api.Types { self: SymbolTable => else instParamRelaxed(ps.tail, as.tail) //Console.println("instantiating " + sym + " from " + basesym + " with " + basesym.typeParams + " and " + baseargs+", pre = "+pre+", symclazz = "+symclazz);//DEBUG - if (sameLength(basesym.typeParams, baseargs)) { + if (sameLength(basesym.typeParams, baseargs)) instParam(basesym.typeParams, baseargs) - } else { - throw new TypeError( - "something is wrong (wrong class file?): "+basesym+ - " with type parameters "+ - basesym.typeParams.map(_.name).mkString("[",",","]")+ - " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase) - } + else + if (symclazz.tpe.parents.exists(_.isErroneous)) + ErrorType // don't be to overzealous with throwing exceptions, see #2641 + else + throw new Error( + "something is wrong (wrong class file?): "+basesym+ + " with type parameters "+ + basesym.typeParams.map(_.name).mkString("[",",","]")+ + " gets applied to arguments "+baseargs.mkString("[",",","]")+", phase = "+phase) case ExistentialType(tparams, qtpe) => capturedSkolems = capturedSkolems union tparams toInstance(qtpe, clazz) @@ -6278,6 +6280,12 @@ trait Types extends api.Types { self: SymbolTable => def this(msg: String) = this(NoPosition, msg) } + // TODO: RecoverableCyclicReference should be separated from TypeError, + // but that would be a big change. Left for further refactoring. 
+ /** An exception for cyclic references from which we can recover */ + case class RecoverableCyclicReference(sym: Symbol) + extends TypeError("illegal cyclic reference involving " + sym) + class NoCommonType(tps: List[Type]) extends Throwable( "lub/glb of incompatible types: " + tps.mkString("", " and ", "")) with ControlThrowable @@ -6286,9 +6294,6 @@ trait Types extends api.Types { self: SymbolTable => def this(pre: Type, tp: String) = this("malformed type: " + pre + "#" + tp) } - /** An exception signalling a variance annotation/usage conflict */ - class VarianceError(msg: String) extends TypeError(msg) - /** The current indentation string for traces */ private var indent: String = "" diff --git a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala index 7b5de1f3dd..c1d6c1a4d4 100644 --- a/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala +++ b/src/compiler/scala/tools/nsc/ast/TreeBrowsers.scala @@ -33,17 +33,16 @@ abstract class TreeBrowsers { val borderSize = 10 - def create(): SwingBrowser = new SwingBrowser(); /** Pseudo tree class, so that all JTree nodes are treated uniformly */ case class ProgramTree(units: List[UnitTree]) extends Tree { - override def toString(): String = "Program" + override def toString: String = "Program" } /** Pseudo tree class, so that all JTree nodes are treated uniformly */ case class UnitTree(unit: CompilationUnit) extends Tree { - override def toString(): String = unit.toString() + override def toString: String = unit.toString } /** diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 88a9b5e18b..3a2c5f61b2 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -17,7 +17,6 @@ import scala.reflect.internal.Flags.TRAIT trait Trees extends reflect.internal.Trees { self: Global => // --- additional cases -------------------------------------------------------- - /** Only used during parsing */ case class Parens(args: List[Tree]) extends Tree @@ -31,7 +30,6 @@ trait Trees extends reflect.internal.Trees { self: Global => override def isType = definition.isType } - /** Either an assignment or a named argument. Only appears in argument lists, * eliminated by typecheck (doTypedApply) */ @@ -40,7 +38,7 @@ trait Trees extends reflect.internal.Trees { self: Global => /** Array selection . only used during erasure */ case class SelectFromArray(qualifier: Tree, name: Name, erasure: Type) - extends TermTree with RefTree { } + extends TermTree with RefTree /** emitted by typer, eliminated by refchecks */ case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree @@ -163,7 +161,7 @@ trait Trees extends reflect.internal.Trees { self: Global => traverser.traverse(qualifier) case ReferenceToBoxed(idt) => traverser.traverse(idt) - case TypeTreeWithDeferredRefCheck() => // TODO: should we traverse the wrapped tree? + case TypeTreeWithDeferredRefCheck() => // (and rewrap the result? how to update the deferred check? 
would need to store wrapped tree instead of returning it from check) case _ => super.xtraverse(traverser, tree) } diff --git a/src/compiler/scala/tools/nsc/interactive/Global.scala b/src/compiler/scala/tools/nsc/interactive/Global.scala index 0fea0a2d92..477cec8c8e 100644 --- a/src/compiler/scala/tools/nsc/interactive/Global.scala +++ b/src/compiler/scala/tools/nsc/interactive/Global.scala @@ -1060,6 +1060,8 @@ class Global(settings: Settings, _reporter: Reporter, projectName: String = "") implicit def addOnTypeError[T](x: => T): OnTypeError[T] = new OnTypeError(x) + // OnTypeError should still catch TypeError because of cyclic references, + // but DivergentImplicit shouldn't leak anymore here class OnTypeError[T](op: => T) { def onTypeError(alt: => T) = try { op diff --git a/src/compiler/scala/tools/nsc/plugins/Plugins.scala b/src/compiler/scala/tools/nsc/plugins/Plugins.scala index 36227c1052..da913a1601 100644 --- a/src/compiler/scala/tools/nsc/plugins/Plugins.scala +++ b/src/compiler/scala/tools/nsc/plugins/Plugins.scala @@ -28,7 +28,7 @@ trait Plugins { val dirs = (settings.pluginsDir.value split File.pathSeparator).toList map Path.apply val classes = Plugin.loadAllFrom(jars, dirs, settings.disable.value) - // Lach plugin must only be instantiated once. A common pattern + // Each plugin must only be instantiated once. A common pattern // is to register annotation checkers during object construction, so // creating multiple plugin instances will leave behind stale checkers. classes map (Plugin.instantiate(_, this)) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala index 2eddd36db0..25ae6f33d2 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/Pickler.scala @@ -59,7 +59,7 @@ abstract class Pickler extends SubComponent { } } // If there are any erroneous types in the tree, then we will crash - // when we pickle it: so let's report an erorr instead. We know next + // when we pickle it: so let's report an error instead. We know next // to nothing about what happened, but our supposition is a lot better // than "bad type: " in terms of explanatory power. 
for (t <- unit.body ; if t.isErroneous) { diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 56d9658377..bdd6a73b79 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -422,7 +422,7 @@ abstract class UnCurry extends InfoTransform if (tp.typeSymbol.isBottomClass) getManifest(AnyClass.tpe) else if (!manifestOpt.tree.isEmpty) manifestOpt.tree else if (tp.bounds.hi ne tp) getManifest(tp.bounds.hi) - else localTyper.getManifestTree(tree.pos, tp, false) + else localTyper.getManifestTree(tree, tp, false) } atPhase(phase.next) { localTyper.typedPos(pos) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala index 16d55c26ca..18c7635b1e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Analyzer.scala @@ -23,6 +23,7 @@ trait Analyzer extends AnyRef with Macros with NamesDefaults with TypeDiagnostics + with ContextErrors { val global : Global import global._ diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala new file mode 100644 index 0000000000..a762e44bda --- /dev/null +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -0,0 +1,1052 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package typechecker + +import scala.collection.{ mutable, immutable } +import scala.tools.util.StringOps.{ countElementsAsString, countAsString } +import symtab.Flags.{ PRIVATE, PROTECTED } +import scala.tools.util.EditDistance.similarString + +trait ContextErrors { + self: Analyzer => + + import global._ + + object ErrorKinds extends Enumeration { + type ErrorKind = Value + val Normal, Access, Ambiguous, Divergent = Value + } + + import ErrorKinds.ErrorKind + + trait AbsTypeError extends Throwable { + def errPos: Position + def errMsg: String + def kind: ErrorKind + } + + case class NormalTypeError(underlyingTree: Tree, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) + extends AbsTypeError { + + def errPos:Position = underlyingTree.pos + override def toString() = "[Type error at:" + underlyingTree.pos + "] " + errMsg + } + + case class SymbolTypeError(underlyingSym: Symbol, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) + extends AbsTypeError { + + def errPos = underlyingSym.pos + } + + case class TypeErrorWrapper(ex: TypeError, kind: ErrorKind = ErrorKinds.Normal) + extends AbsTypeError { + def errMsg = ex.msg + def errPos = ex.pos + } + + case class TypeErrorWithUnderlyingTree(tree: Tree, ex: TypeError, kind: ErrorKind = ErrorKinds.Normal) + extends AbsTypeError { + def errMsg = ex.msg + def errPos = tree.pos + } + + case class AmbiguousTypeError(underlyingTree: Tree, errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Ambiguous) extends AbsTypeError + + case class PosAndMsgTypeError(errPos: Position, errMsg: String, kind: ErrorKind = ErrorKinds.Normal) extends AbsTypeError + + object ErrorUtils { + def issueNormalTypeError(tree: Tree, msg: String)(implicit context: Context) { + issueTypeError(NormalTypeError(tree, msg)) + } + + def issueSymbolTypeError(sym: Symbol, msg: String)(implicit context: Context) { + issueTypeError(SymbolTypeError(sym, msg)) + } + + def issueDivergentImplicitsError(tree: Tree, msg: String)(implicit context: 
Context) { + issueTypeError(NormalTypeError(tree, msg, ErrorKinds.Divergent)) + } + + def issueAmbiguousTypeError(pre: Type, sym1: Symbol, sym2: Symbol, err: AmbiguousTypeError)(implicit context: Context) { + context.issueAmbiguousError(pre, sym1, sym2, err) + } + + def issueTypeError(err: AbsTypeError)(implicit context: Context) { context.issue(err) } + + def typeErrorMsg(found: Type, req: Type, possiblyMissingArgs: Boolean) = { + def missingArgsMsg = if (possiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else "" + "type mismatch" + foundReqMsg(found, req) + missingArgsMsg + } + } + + import ErrorUtils._ + + trait TyperContextErrors { + self: Typer => + + import infer.setError + + object TyperErrorGen { + implicit val context0: Context = infer.getContext + + def UnstableTreeError(tree: Tree) = { + def addendum = { + "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile." + } + issueNormalTypeError(tree, + "stable identifier required, but "+tree+" found." + ( + if (isStableExceptVolatile(tree)) addendum else "")) + setError(tree) + } + + def NoImplicitFoundError(tree: Tree, param: Symbol) = { + def errMsg = { + val paramName = param.name + val paramTp = param.tpe + paramTp.typeSymbol match { + case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp) + case _ => + "could not find implicit value for "+ + (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type " + else "parameter "+paramName+": ")+paramTp + } + } + issueNormalTypeError(tree, errMsg) + } + + def AdaptTypeError(tree: Tree, found: Type, req: Type) = { + // If the expected type is a refinement type, and the found type is a refinement or an anon + // class, we can greatly improve the error message by retyping the tree to recover the actual + // members present, then display along with the expected members. This is done here because + // this is the last point where we still have access to the original tree, rather than just + // the found/req types. + val foundType: Type = req.normalize match { + case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => + val retyped = typed (tree.duplicate setType null) + val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) + + if (foundDecls.isEmpty) found + else { + // The members arrive marked private, presumably because there was no + // expected type and so they're considered members of an anon class. + foundDecls foreach (_ resetFlag (PRIVATE | PROTECTED)) + // TODO: if any of the found parents match up with required parents after normalization, + // print the error so that they match. The major beneficiary there would be + // java.lang.Object vs. AnyRef. 
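
The fragments above replace thrown TypeErrors with plain error values (AbsTypeError and its case classes) that carry a position, a message and a kind, plus issue* helpers that hand those values to whatever Context is implicitly in scope. The following standalone sketch shows the shape of that pattern only; MiniContext, Err and issueNormal are made-up names for illustration, not compiler API:

    import scala.collection.mutable.LinkedHashSet

    object ErrorValueSketch {
      object Kinds extends Enumeration { val Normal, Ambiguous, Divergent = Value }

      // an error is just data: where, what, and what kind
      final case class Err(pos: Int, msg: String, kind: Kinds.Value = Kinds.Normal)

      // the context decides what "issuing" means: report now or buffer for later
      final class MiniContext(val buffering: Boolean) {
        val buffer = LinkedHashSet[Err]()
        def issue(err: Err) {
          if (buffering) buffer += err
          else Console.err.println("error at " + err.pos + ": " + err.msg)
        }
      }

      // helpers only build the value and delegate to the implicit context
      def issueNormal(pos: Int, msg: String)(implicit ctx: MiniContext) {
        ctx.issue(Err(pos, msg))
      }

      def main(args: Array[String]) {
        implicit val silent = new MiniContext(buffering = true)
        issueNormal(42, "type mismatch")
        println(silent.buffer.toList) // List(Err(42,type mismatch,Normal))
      }
    }

Because the helpers take the Context implicitly, the error-generator objects in the real patch only need one implicit val (context0) to keep every call site terse.
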
+ refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos) + } + case _ => + found + } + assert(!found.isErroneous && !req.isErroneous) + + issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) ) + if (settings.explaintypes.value) + explainTypes(found, req) + } + + def WithFilterError(tree: Tree, ex: AbsTypeError) = { + issueTypeError(ex) + setError(tree) + } + + def ParentTypesError(templ: Template, ex: TypeError) = { + templ.tpe = null + issueNormalTypeError(templ, ex.getMessage()) + } + + // additional parentTypes errors + def ConstrArgsInTraitParentTpeError(arg: Tree, parent: Symbol) = + issueNormalTypeError(arg, parent + " is a trait; does not take constructor arguments") + + def MissingTypeArgumentsParentTpeError(supertpt: Tree) = + issueNormalTypeError(supertpt, "missing type arguments") + + // typedIdent + def AmbiguousIdentError(tree: Tree, name: Name, msg: String) = + NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg) + + def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = { + // This laborious determination arrived at to keep the tests working. + val calcSimilar = ( + name.length > 2 && ( + startingIdentCx.reportErrors + || startingIdentCx.enclClassOrMethod.reportErrors + ) + ) + // avoid calculating if we're in "silent" mode. + // name length check to limit unhelpful suggestions for e.g. "x" and "b1" + val similar = { + if (!calcSimilar) "" + else { + val allowed = ( + startingIdentCx.enclosingContextChain + flatMap (ctx => ctx.scope.toList ++ ctx.imports.flatMap(_.allImportedSymbols)) + filter (sym => sym.isTerm == name.isTermName) + filterNot (sym => sym.isPackage || sym.isSynthetic || sym.hasMeaninglessName) + ) + val allowedStrings = ( + allowed.map("" + _.name).distinct.sorted + filterNot (s => (s contains '$') || (s contains ' ')) + ) + similarString("" + name, allowedStrings) + } + } + NormalTypeError(tree, "not found: "+decodeWithKind(name, owner) + similar) + } + + // typedAppliedTypeTree + def AppliedTypeNoParametersError(tree: Tree, errTpe: Type) = { + issueNormalTypeError(tree, errTpe + " does not take type parameters") + setError(tree) + } + + def AppliedTypeWrongNumberOfArgsError(tree: Tree, tpt: Tree, tparams: List[Symbol]) = { + val tptSafeString: String = try { + tpt.tpe.toString() + } catch { + case _: CyclicReference => + tpt.toString() + } + val msg = "wrong number of type arguments for "+tptSafeString+", should be "+tparams.length + issueNormalTypeError(tree, msg) + setError(tree) + } + + // typedTypeDef + def LowerBoundError(tree: TypeDef, lowB: Type, highB: Type) = + issueNormalTypeError(tree, "lower bound "+lowB+" does not conform to upper bound "+highB) + + def HiddenSymbolWithError[T <: Tree](tree: T): T = + setError(tree) + + def SymbolEscapesScopeError[T <: Tree](tree: T, badSymbol: Symbol): T = { + val modifierString = if (badSymbol.isPrivate) "private " else "" + issueNormalTypeError(tree, modifierString + badSymbol + " escapes its defining scope as part of type "+tree.tpe) + setError(tree) + } + + // typedDefDef + def StarParamNotLastError(param: Tree) = + issueNormalTypeError(param, "*-parameter must come last") + + def StarWithDefaultError(meth: Symbol) = + issueSymbolTypeError(meth, "a parameter section with a `*'-parameter is not allowed to have default arguments") + + def InvalidConstructorDefError(ddef: Tree) = + issueNormalTypeError(ddef, "constructor definition not allowed here") + + def 
DeprecatedParamNameError(param: Symbol, name: Name) = + issueSymbolTypeError(param, "deprecated parameter name "+ name +" has to be distinct from any other parameter name (deprecated or not).") + + // computeParamAliases + def SuperConstrReferenceError(tree: Tree) = + NormalTypeError(tree, "super constructor cannot be passed a self reference unless parameter is declared by-name") + + def SuperConstrArgsThisReferenceError(tree: Tree) = + NormalTypeError(tree, "super constructor arguments cannot reference unconstructed `this`") + + // typedValDef + def VolatileValueError(vdef: Tree) = + issueNormalTypeError(vdef, "values cannot be volatile") + + def FinalVolatileVarError(vdef: Tree) = + issueNormalTypeError(vdef, "final vars cannot be volatile") + + def LocalVarUninitializedError(vdef: Tree) = + issueNormalTypeError(vdef, "local variables must be initialized") + + //typedAssign + def AssignmentError(tree: Tree, varSym: Symbol) = { + issueNormalTypeError(tree, + if (varSym != null && varSym.isValue) "reassignment to val" + else "assignment to non variable") + setError(tree) + } + + def UnexpectedTreeAssignmentConversionError(tree: Tree) = { + issueNormalTypeError(tree, "Unexpected tree during assignment conversion.") + setError(tree) + } + + def MultiDimensionalArrayError(tree: Tree) = { + issueNormalTypeError(tree, "cannot create a generic multi-dimensional array of more than "+ definitions.MaxArrayDims+" dimensions") + setError(tree) + } + + //typedSuper + def MixinMissingParentClassNameError(tree: Tree, mix: Name, clazz: Symbol) = + issueNormalTypeError(tree, mix+" does not name a parent class of "+clazz) + + def AmbiguousParentClassError(tree: Tree) = + issueNormalTypeError(tree, "ambiguous parent class qualifier") + + //typedSelect + def NotAMemberError(sel: Tree, qual: Tree, name: Name) = { + def errMsg = { + val owner = qual.tpe.typeSymbol + val target = qual.tpe.widen + def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else "" + def nameString = decodeWithKind(name, owner) + /** Illuminating some common situations and errors a bit further. */ + def addendum = { + val companion = { + if (name.isTermName && owner.isPackageClass) { + target.member(name.toTypeName) match { + case NoSymbol => "" + case sym => "\nNote: %s exists, but it has no companion object.".format(sym) + } + } + else "" + } + val semicolon = ( + if (linePrecedes(qual, sel)) + "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?" 
+ else + "" + ) + companion + semicolon + } + withAddendum(qual.pos)( + if (name == nme.CONSTRUCTOR) target + " does not have a constructor" + else nameString + " is not a member of " + targetKindString + target + addendum + ) + } + issueNormalTypeError(sel, errMsg) + // the error has to be set for the copied tree, otherwise + // the error remains persistent acros multiple compilations + // and causes problems + //setError(sel) + } + + //typedNew + def IsAbstractError(tree: Tree, sym: Symbol) = { + issueNormalTypeError(tree, sym + " is abstract; cannot be instantiated") + setError(tree) + } + + def DoesNotConformToSelfTypeError(tree: Tree, sym: Symbol, tpe0: Type) = { + issueNormalTypeError(tree, sym + " cannot be instantiated because it does not conform to its self-type " + tpe0) + setError(tree) + } + + //typedEta + def UnderscoreEtaError(tree: Tree) = { + issueNormalTypeError(tree, "_ must follow method; cannot follow " + tree.tpe) + setError(tree) + } + + //typedReturn + def ReturnOutsideOfDefError(tree: Tree) = { + issueNormalTypeError(tree, "return outside method definition") + setError(tree) + } + + def ReturnWithoutTypeError(tree: Tree, owner: Symbol) = { + issueNormalTypeError(tree, owner + " has return statement; needs result type") + setError(tree) + } + + //typedBind + def VariableInPatternAlternativeError(tree: Tree) = { + issueNormalTypeError(tree, "illegal variable in pattern alternative") + //setError(tree) + } + + //typedCase + def StarPositionInPatternError(tree: Tree) = + issueNormalTypeError(tree, "_* may only come last") + + //typedFunction + def MaxFunctionArityError(fun: Tree) = { + issueNormalTypeError(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters") + setError(fun) + } + + def WrongNumberOfParametersError(tree: Tree, argpts: List[Type]) = { + issueNormalTypeError(tree, "wrong number of parameters; expected = " + argpts.length) + setError(tree) + } + + def MissingParameterTypeError(fun: Tree, vparam: ValDef, pt: Type) = { + def anonMessage = ( + "\nThe argument types of an anonymous function must be fully known. 
(SLS 8.5)" + + "\nExpected type was: " + pt.toLongString + ) + + val suffix = + if (!vparam.mods.isSynthetic) "" + else " for expanded function" + (fun match { + case Function(_, Match(_, _)) => anonMessage + case _ => " " + fun + }) + + issueNormalTypeError(vparam, "missing parameter type" + suffix) + } + + def ConstructorsOrderError(tree: Tree) = { + issueNormalTypeError(tree, "called constructor's definition must precede calling constructor's definition") + setError(tree) + } + + def OnlyDeclarationsError(tree: Tree) = { + issueNormalTypeError(tree, "only declarations allowed here") + setError(tree) + } + + // typedAnnotation + def AnnotationNotAConstantError(tree: Tree) = + NormalTypeError(tree, "annotation argument needs to be a constant; found: " + tree) + + def AnnotationArgNullError(tree: Tree) = + NormalTypeError(tree, "annotation argument cannot be null") + + def ArrayConstantsError(tree: Tree) = + NormalTypeError(tree, "Array constants have to be specified using the `Array(...)' factory method") + + def ArrayConstantsTypeMismatchError(tree: Tree, pt: Type) = + NormalTypeError(tree, "found array constant, expected argument of type " + pt) + + def UnexpectedTreeAnnotation(tree: Tree) = + NormalTypeError(tree, "unexpected tree in annotation: "+ tree) + + def AnnotationTypeMismatchError(tree: Tree, expected: Type, found: Type) = + NormalTypeError(tree, "expected annotation of type " + expected + ", found " + found) + + def MultipleArgumentListForAnnotationError(tree: Tree) = + NormalTypeError(tree, "multiple argument lists on classfile annotation") + + def UnknownAnnotationNameError(tree: Tree, name: Name) = + NormalTypeError(tree, "unknown annotation argument name: " + name) + + def DuplicateValueAnnotationError(tree: Tree, name: Name) = + NormalTypeError(tree, "duplicate value for annotation argument " + name) + + def ClassfileAnnotationsAsNamedArgsError(tree: Tree) = + NormalTypeError(tree, "classfile annotation arguments have to be supplied as named arguments") + + def AnnotationMissingArgError(tree: Tree, annType: Type, sym: Symbol) = + NormalTypeError(tree, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name) + + def NestedAnnotationError(tree: Tree, annType: Type) = + NormalTypeError(tree, "nested classfile annotations must be defined in java; found: "+ annType) + + def UnexpectedTreeAnnotationError(tree: Tree, unexpected: Tree) = + NormalTypeError(tree, "unexpected tree after typing annotation: "+ unexpected) + + // TODO no test case + //typedExistentialTypeTree + def AbstractionFromVolatileTypeError(vd: ValDef) = + issueNormalTypeError(vd, "illegal abstraction from value with volatile type "+vd.symbol.tpe) + + def TypedApplyWrongNumberOfTpeParametersError(tree: Tree, fun: Tree) = { + issueNormalTypeError(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun)) + setError(tree) + } + + def TypedApplyDoesNotTakeTpeParametersError(tree: Tree, fun: Tree) = { + issueNormalTypeError(tree, treeSymTypeMsg(fun)+" does not take type parameters.") + setError(tree) + } + + // doTypeApply + //tryNamesDefaults + def WrongNumberOfArgsError(tree: Tree, fun: Tree) = + NormalTypeError(tree, "wrong number of arguments for "+ treeSymTypeMsg(fun)) + + def TooManyArgsNamesDefaultsError(tree: Tree, fun: Tree) = + NormalTypeError(tree, "too many arguments for "+treeSymTypeMsg(fun)) + + // can it still happen? 
see test case neg/t960.scala + // TODO no test case + def OverloadedUnapplyError(tree: Tree) = + issueNormalTypeError(tree, "cannot resolve overloaded unapply") + + def UnapplyWithSingleArgError(tree: Tree) = + issueNormalTypeError(tree, "an unapply method must accept a single argument.") + + def MultipleVarargError(tree: Tree) = + NormalTypeError(tree, "when using named arguments, the vararg parameter has to be specified exactly once") + + def ModuleUsingCompanionClassDefaultArgsErrror(tree: Tree) = + NormalTypeError(tree, "module extending its companion class cannot use default constructor arguments") + + def NotEnoughArgsError(tree: Tree, fun0: Tree, missing0: List[Symbol]) = { + def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]) = { + val suffix = { + if (missing.isEmpty) "" + else { + val keep = missing take 3 map (_.name) + ".\nUnspecified value parameter%s %s".format( + if (missing.tail.isEmpty) "" else "s", + if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ") + else keep.mkString("", ", ", ".") + ) + } + } + + "not enough arguments for " + treeSymTypeMsg(fun) + suffix + } + NormalTypeError(tree, notEnoughArgumentsMsg(fun0, missing0)) + } + + //doTypedApply - patternMode + // TODO: missing test case + def TooManyArgsPatternError(fun: Tree) = + NormalTypeError(fun, "too many arguments for unapply pattern, maximum = "+definitions.MaxTupleArity) + + def WrongNumberArgsPatternError(tree: Tree, fun: Tree) = + NormalTypeError(tree, "wrong number of arguments for "+treeSymTypeMsg(fun)) + + def ApplyWithoutArgsError(tree: Tree, fun: Tree) = + NormalTypeError(tree, fun.tpe+" does not take parameters") + + //checkClassType + def TypeNotAStablePrefixError(tpt: Tree, pre: Type) = { + issueNormalTypeError(tpt, "type "+pre+" is not a stable prefix") + setError(tpt) + } + + def ClassTypeRequiredError(tree: Tree, found: AnyRef) = { + issueNormalTypeError(tree, "class type required but "+found+" found") + setError(tree) + } + + // validateParentClasses + def ParentSuperSubclassError(parent: Tree, superclazz: Symbol, + parentSym: Symbol, mixin: Symbol) = + NormalTypeError(parent, "illegal inheritance; super"+superclazz+ + "\n is not a subclass of the super"+parentSym+ + "\n of the mixin " + mixin) + + def ParentNotATraitMixinError(parent: Tree, mixin: Symbol) = + NormalTypeError(parent, mixin+" needs to be a trait to be mixed in") + + def ParentFinalInheritanceError(parent: Tree, mixin: Symbol) = + NormalTypeError(parent, "illegal inheritance from final "+mixin) + + def ParentSealedInheritanceError(parent: Tree, psym: Symbol) = + NormalTypeError(parent, "illegal inheritance from sealed " + psym + ": " + context.unit.source.file.canonicalPath + " != " + psym.sourceFile.canonicalPath) + + def ParentSelfTypeConformanceError(parent: Tree, selfType: Type) = + NormalTypeError(parent, + "illegal inheritance;\n self-type "+selfType+" does not conform to "+ + parent +"'s selftype "+parent.tpe.typeOfThis) + + // TODO: missing test case + def ParentInheritedTwiceError(parent: Tree, parentSym: Symbol) = + NormalTypeError(parent, parentSym+" is inherited twice") + + //adapt + def MissingArgsForMethodTpeError(tree: Tree, meth: Symbol) = { + issueNormalTypeError(tree, + "missing arguments for " + meth.fullLocationString + ( + if (meth.isConstructor) "" + else ";\nfollow this method with `_' if you want to treat it as a partially applied function" + )) + setError(tree) + } + + def MissingTypeParametersError(tree: Tree) = { + issueNormalTypeError(tree, tree.symbol+" takes type parameters") + 
setError(tree) + } + + def KindArityMismatchError(tree: Tree, pt: Type) = { + issueNormalTypeError(tree, + tree.tpe+" takes "+countElementsAsString(tree.tpe.typeParams.length, "type parameter")+ + ", expected: "+countAsString(pt.typeParams.length)) + setError(tree) + } + + def CaseClassConstructorError(tree: Tree) = { + issueNormalTypeError(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method") + setError(tree) + } + + //TODO Needs test case + def ConstructorPrefixError(tree: Tree, restpe: Type) = { + issueNormalTypeError(tree, restpe.prefix+" is not a legal prefix for a constructor") + setError(tree) + } + + // SelectFromTypeTree + def TypeSelectionFromVolatileTypeError(tree: Tree, qual: Tree) = { + issueNormalTypeError(tree, "illegal type selection from volatile type "+qual.tpe) + setError(tree) + } + + // packedType + def InferTypeWithVolatileTypeSelectionError(tree: Tree, pre: Type) = + issueNormalTypeError(tree, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre) + + def AbstractExistentiallyOverParamerizedTpeError(tree: Tree, tp: Type) = + issueNormalTypeError(tree, "can't existentially abstract over parameterized type " + tp) + + //manifestTreee + def MissingManifestError(tree: Tree, full: Boolean, tp: Type) = { + issueNormalTypeError(tree, "cannot find "+(if (full) "" else "class ")+"manifest for element type "+tp) + setError(tree) + } + + // TODO needs test case + // cases where we do not necessarily return trees + def DependentMethodTpeConversionToFunctionError(tree: Tree, tp: Type) = + issueNormalTypeError(tree, "method with dependent type "+tp+" cannot be converted to function value") + + //checkStarPatOK + def StarPatternWithVarargParametersError(tree: Tree) = + issueNormalTypeError(tree, "star patterns must correspond with varargs parameters") + + // TODO missing test case + def FinitaryError(tparam: Symbol) = + issueSymbolTypeError(tparam, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive") + + // TODO missing test case for a second case + def QualifyingClassError(tree: Tree, qual: Name) = { + issueNormalTypeError(tree, + if (qual.isEmpty) tree + " can be used only in a class, object, or template" + else qual + " is not an enclosing class") + setError(tree) + } + + // def stabilize + def NotAValueError(tree: Tree, sym: Symbol) = { + issueNormalTypeError(tree, sym.kindString + " " + sym.fullName + " is not a value") + setError(tree) + } + + // checkNoDoubleDefs... 
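
Two idioms recur in the factory methods above: some (e.g. UnderscoreEtaError) issue the error immediately and mark the offending tree as erroneous via setError so typing can continue, while others (e.g. WrongNumberOfArgsError) only construct a NormalTypeError and leave it to the caller to decide where, and whether, to issue it. A toy sketch of the distinction, with made-up Tree/Ctx types standing in for the real ones:

    object FactoryIdiomSketch {
      final case class Err(msg: String)
      final class Ctx {
        val buffered = scala.collection.mutable.ListBuffer[Err]()
        def issue(e: Err) { buffered += e }
      }
      final class Tree { var erroneous = false }
      def setError(t: Tree): Tree = { t.erroneous = true; t }

      class ErrorGen(implicit val context0: Ctx) {
        // idiom 1: report right away and poison the tree
        def underscoreEtaError(tree: Tree): Tree = {
          context0.issue(Err("_ must follow method"))
          setError(tree)
        }
        // idiom 2: only build the error value; the call site issues it (or not)
        def wrongNumberOfArgsError(tree: Tree): Err =
          Err("wrong number of arguments")
      }

      def main(args: Array[String]) {
        implicit val ctx = new Ctx
        val gen = new ErrorGen
        gen.underscoreEtaError(new Tree)
        println(ctx.buffered.toList) // List(Err(_ must follow method))
      }
    }
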
+ def DefDefinedTwiceError(sym0: Symbol, sym1: Symbol) = + issueSymbolTypeError(sym0, sym1+" is defined twice"+ + {if(!settings.debug.value) "" else " in "+context0.unit}+ + {if (sym0.isMacro && sym1.isMacro) " \n(note that macros cannot be overloaded)" else ""}) + + // cyclic errors + def CyclicAliasingOrSubtypingError(errPos: Position, sym0: Symbol) = + issueTypeError(PosAndMsgTypeError(errPos, "cyclic aliasing or subtyping involving "+sym0)) + + def CyclicReferenceError(errPos: Position, lockedSym: Symbol) = + issueTypeError(PosAndMsgTypeError(errPos, "illegal cyclic reference involving " + lockedSym)) + + def MacroExpandError(tree: Tree, t: Any) = { + issueNormalTypeError(tree, "macros must return a compiler-specific tree; returned class is: " + t.getClass) + setError(tree) + } + } + } + + trait InferencerContextErrors { + self: Inferencer => + + private def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = { + def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")") + + def resType = if (pt isWildcard) "" else " with expected result type " + pt + def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt + def locals = alternatives(tree) flatMap (_.typeParams) + + withDisambiguation(locals, allTypes: _*) { + treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType + } + } + + object InferErrorGen { + + implicit val context0 = getContext + + object PolyAlternativeErrorKind extends Enumeration { + type ErrorType = Value + val WrongNumber, NoParams, ArgsDoNotConform = Value + } + + private def ambiguousErrorMsgPos(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) = + if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) { + val methodName = nme.defaultGetterToMethod(sym1.name) + (sym1.enclClass.pos, + "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName + + " define default arguments") + } else { + (pos, + ("ambiguous reference to overloaded definition,\n" + + "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) + + "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) + + "\nmatch " + rest) + ) + } + + def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = { + def errMsg = { + val location = if (sym.isClassConstructor) owner0 else pre.widen + + underlying(sym).fullLocationString + " cannot be accessed in " + + location + explanation + } + NormalTypeError(tree, errMsg, ErrorKinds.Access) + } + + def NoMethodInstanceError(fn: Tree, args: List[Tree], msg: String) = + issueNormalTypeError(fn, + "no type parameters for " + + applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) + + "\n --- because ---\n" + msg) + + // TODO: no test case + def NoConstructorInstanceError(tree: Tree, restpe: Type, pt: Type, msg: String) = { + issueNormalTypeError(tree, + "constructor of type " + restpe + + " cannot be uniquely instantiated to expected type " + pt + + "\n --- because ---\n" + msg) + setError(tree) + } + + def ConstrInstantiationError(tree: Tree, restpe: Type, pt: Type) = { + issueNormalTypeError(tree, + "constructor cannot be instantiated to expected type" + foundReqMsg(restpe, pt)) + setError(tree) + } + + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = + issueNormalTypeError(tree, + applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) + + def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, + 
firstCompeting: Symbol, argtpes: List[Type], pt: Type) = { + val msg0 = + "argument types " + argtpes.mkString("(", ",", ")") + + (if (pt == WildcardType) "" else " and expected result type " + pt) + val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, msg0) + issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg)) + } + + def NoBestExprAlternativeError(tree: Tree, pt: Type) = + issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(tree.symbol.tpe, pt, isPossiblyMissingArgs(tree.symbol.tpe, pt)))) + + def AmbiguousExprAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, pt: Type) = { + val (pos, msg) = ambiguousErrorMsgPos(tree.pos, pre, best, firstCompeting, "expected type " + pt) + setError(tree) + issueAmbiguousTypeError(pre, best, firstCompeting, AmbiguousTypeError(tree, pos, msg)) + } + + // checkBounds + def KindBoundErrors(tree: Tree, prefix: String, targs: List[Type], + tparams: List[Symbol], kindErrors: List[String]) = { + issueNormalTypeError(tree, + prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") + + " do not conform to the expected kinds of the type parameters "+ + tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." + + kindErrors.toList.mkString("\n", ", ", "")) + } + + def NotWithinBounds(tree: Tree, prefix: String, targs: List[Type], + tparams: List[Symbol], kindErrors: List[String]) = { + if (settings.explaintypes.value) { + val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds) + (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ)) + (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi)) + () + } + + issueNormalTypeError(tree, + prefix + "type arguments " + targs.mkString("[", ",", "]") + + " do not conform to " + tparams.head.owner + "'s type parameter bounds " + + (tparams map (_.defString)).mkString("[", ",", "]")) + } + + //substExpr + def PolymorphicExpressionInstantiationError(tree: Tree, undetparams: List[Symbol], pt: Type) = + issueNormalTypeError(tree, + "polymorphic expression cannot be instantiated to expected type" + + foundReqMsg(polyType(undetparams, skipImplicit(tree.tpe)), pt)) + + //checkCheckable + def TypePatternOrIsInstanceTestError(tree: Tree, tp: Type) = + issueNormalTypeError(tree, "type "+tp+" cannot be used in a type pattern or isInstanceOf test") + + def PatternTypeIncompatibleWithPtError1(tree: Tree, pattp: Type, pt: Type) = + issueNormalTypeError(tree, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt)) + + def IncompatibleScrutineeTypeError(tree: Tree, pattp: Type, pt: Type) = + issueNormalTypeError(tree, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt)) + + def PatternTypeIncompatibleWithPtError2(pat: Tree, pt1: Type, pt: Type) = { + def errMsg = { + val sym = pat.tpe.typeSymbol + val clazz = sym.companionClass + val addendum = ( + if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) { + // TODO: move these somewhere reusable. 
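
The addendum assembled just below renders a case class into the two pattern suggestions offered to the user, one matching the type and one matching the constructor. A quick standalone rendering of that string logic, with CaseInfo standing in for the real Symbol queries:

    object PatternHintSketch {
      final case class CaseInfo(name: String, typeParams: Int, fields: Int)

      // e.g. "Some[_]" when the class has type parameters, plain "Some" otherwise
      def typeString(c: CaseInfo): String =
        if (c.typeParams == 0) c.name
        else List.fill(c.typeParams)("_").mkString(c.name + "[", ",", "]")

      // e.g. "Some(_)" with one wildcard per case field accessor
      def caseString(c: CaseInfo): String =
        List.fill(c.fields)("_").mkString(c.name + "(", ",", ")")

      def main(args: Array[String]) {
        val some = CaseInfo("Some", typeParams = 1, fields = 1)
        println("case _: " + typeString(some)) // case _: Some[_]
        println("case " + caseString(some))    // case Some(_)
      }
    }
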
+ val typeString = clazz.typeParams match { + case Nil => "" + clazz.name + case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]") + } + val caseString = ( + clazz.caseFieldAccessors + map (_ => "_") // could use the actual param names here + mkString (clazz.name + "(", ",", ")") + ) + ( + "\nNote: if you intended to match against the class, try `case _: " + + typeString + "` or `case " + caseString + "`" + ) + } + else "" + ) + "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum + } + issueNormalTypeError(pat, errMsg) + } + + def PolyAlternativeError(tree: Tree, argtypes: List[Type], sym: Symbol, err: PolyAlternativeErrorKind.ErrorType) = { + import PolyAlternativeErrorKind._ + val msg = + err match { + case WrongNumber => + "wrong number of type parameters for " + treeSymTypeMsg(tree) + case NoParams => + treeSymTypeMsg(tree) + " does not take type parameters" + case ArgsDoNotConform => + "type arguments " + argtypes.mkString("[", ",", "]") + + " conform to the bounds of none of the overloaded alternatives of\n "+sym+ + ": "+sym.info + } + issueNormalTypeError(tree, msg) + () + } + } + } + + trait NamerContextErrors { + self: Namer => + + object NamerErrorGen { + + implicit val context0 = context + + object SymValidateErrors extends Enumeration { + val ImplicitConstr, ImplicitNotTerm, ImplicitTopObject, + OverrideClass, SealedNonClass, AbstractNonClass, + OverrideConstr, AbstractOverride, LazyAndEarlyInit, + ByNameParameter, AbstractVar = Value + } + + object DuplicatesErrorKinds extends Enumeration { + val RenamedTwice, AppearsTwice = Value + } + + import SymValidateErrors._ + import DuplicatesErrorKinds._ + import symtab.Flags + + def TypeSigError(tree: Tree, ex: TypeError) = { + ex match { + case CyclicReference(sym, info: TypeCompleter) => + issueNormalTypeError(tree, typer.cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) + case _ => + context0.issue(TypeErrorWithUnderlyingTree(tree, ex)) + } + } + + def GetterDefinedTwiceError(getter: Symbol) = + issueSymbolTypeError(getter, getter+" is defined twice") + + def ValOrValWithSetterSuffixError(tree: Tree) = + issueNormalTypeError(tree, "Names of vals or vars may not end in `_='") + + def PrivateThisCaseClassParameterError(tree: Tree) = + issueNormalTypeError(tree, "private[this] not allowed for case class parameters") + + def BeanPropertyAnnotationLimitationError(tree: Tree) = + issueNormalTypeError(tree, "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import") + + def BeanPropertyAnnotationFieldWithoutLetterError(tree: Tree) = + issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to fields that start with a letter") + + def BeanPropertyAnnotationPrivateFieldError(tree: Tree) = + issueNormalTypeError(tree, "`BeanProperty' annotation can be applied only to non-private fields") + + def DoubleDefError(currentSym: Symbol, prevSym: Symbol) = { + val s1 = if (prevSym.isModule) "case class companion " else "" + val s2 = if (prevSym.isSynthetic) "(compiler-generated) " + s1 else "" + val s3 = if (prevSym.isCase) "case class " + prevSym.name else "" + prevSym + + issueSymbolTypeError(currentSym, prevSym.name + " is already defined as " + s2 + s3) + } + + def MaxParametersCaseClassError(tree: Tree) = + issueNormalTypeError(tree, "Implementation restriction: case classes cannot have more than " + definitions.MaxFunctionArity + " parameters.") + + def InheritsItselfError(tree: Tree) = + issueNormalTypeError(tree, 
tree.tpe.typeSymbol+" inherits itself") + + def MissingParameterOrValTypeError(vparam: Tree) = + issueNormalTypeError(vparam, "missing parameter type") + + def RootImportError(tree: Tree) = + issueNormalTypeError(tree, "_root_ cannot be imported") + + def SymbolValidationError(sym: Symbol, errKind: SymValidateErrors.Value) { + val msg = errKind match { + case ImplicitConstr => + "`implicit' modifier not allowed for constructors" + + case ImplicitNotTerm => + "`implicit' modifier can be used only for values, variables and methods" + + case ImplicitTopObject => + "`implicit' modifier cannot be used for top-level objects" + + case OverrideClass => + "`override' modifier not allowed for classes" + + case SealedNonClass => + "`sealed' modifier can be used only for classes" + + case AbstractNonClass => + "`abstract' modifier can be used only for classes; it should be omitted for abstract members" + + case OverrideConstr => + "`override' modifier not allowed for constructors" + + case AbstractOverride => + "`abstract override' modifier only allowed for members of traits" + + case LazyAndEarlyInit => + "`lazy' definitions may not be initialized early" + + case ByNameParameter => + "pass-by-name arguments not allowed for case class parameters" + + case AbstractVar => + "only classes can have declared but undefined members" + abstractVarMessage(sym) + + } + issueSymbolTypeError(sym, msg) + } + + + def AbstractMemberWithModiferError(sym: Symbol, flag: Int) = + issueSymbolTypeError(sym, "abstract member may not have " + Flags.flagsToString(flag) + " modifier") + + def IllegalModifierCombination(sym: Symbol, flag1: Int, flag2: Int) = + issueSymbolTypeError(sym, "illegal combination of modifiers: %s and %s for: %s".format( + Flags.flagsToString(flag1), Flags.flagsToString(flag2), sym)) + + def IllegalDependentMethTpeError(sym: Symbol)(context: Context) = { + val errorAddendum = + ": parameter appears in the type of another parameter in the same section or an earlier one" + issueSymbolTypeError(sym, "illegal dependent method type" + errorAddendum)(context) + } + + def DuplicatesError(tree: Tree, name: Name, kind: DuplicatesErrorKinds.Value) = { + val msg = kind match { + case RenamedTwice => + "is renamed twice" + case AppearsTwice => + "appears twice as a target of a renaming" + } + + issueNormalTypeError(tree, name.decode + " " + msg) + } + } + } + + trait ImplicitsContextErrors { + self: ImplicitSearch => + + import definitions._ + + def AmbiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo, + pre1: String, pre2: String, trailer: String) + (isView: Boolean, pt: Type, tree: Tree)(implicit context0: Context) = { + if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) { + val coreMsg = + pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+ + pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+ + trailer + val errMsg = + if (isView) { + val found = pt.typeArgs(0) + val req = pt.typeArgs(1) + def defaultExplanation = + "Note that implicit conversions are not applicable because they are ambiguous:\n "+ + coreMsg+"are possible conversion functions from "+ found+" to "+req + + def explanation = { + val sym = found.typeSymbol + // Explain some common situations a bit more clearly. + if (AnyRefClass.tpe <:< req) { + if (sym == AnyClass || sym == UnitClass) { + "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" + + "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so." 
+ } + else boxedClass get sym match { + case Some(boxed) => + "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" + + "methods inherited from Object are rendered ambiguous. This is to avoid\n" + + "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" + + "You may wish to use a type ascription: `x: " + boxed.fullName + "`." + case _ => + defaultExplanation + } + } + else defaultExplanation + } + + typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req)) + "\n" + explanation + } else { + "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt + } + context.issueAmbiguousError(AmbiguousTypeError(tree, tree.pos, errMsg)) + } + } + + def DivergingImplicitExpansionError(tree: Tree, pt: Type, sym: Symbol)(implicit context0: Context) = + issueDivergentImplicitsError(tree, + "diverging implicit expansion for type "+pt+"\nstarting with "+ + sym.fullLocationString) + } + + object NamesDefaultsErrorsGen { + import typer.infer.setError + + def NameClashError(sym: Symbol, arg: Tree)(implicit context: Context) = { + setError(arg) // to distinguish it from ambiguous reference error + + def errMsg = + "%s definition needs %s because '%s' is used as a named argument in its body.".format( + "variable", // "method" + "type", // "result type" + sym.name) + issueSymbolTypeError(sym, errMsg) + } + + def AmbiguousReferenceInNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = { + if (!arg.isErroneous) { // check if name clash wasn't reported already + issueNormalTypeError(arg, + "reference to "+ name +" is ambiguous; it is both a method parameter "+ + "and a variable in scope.") + setError(arg) + } else arg + } + + def UnknownParameterNameNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = { + issueNormalTypeError(arg, "unknown parameter name: " + name) + setError(arg) + } + + def DoubleParamNamesDefaultError(arg: Tree, name: Name)(implicit context: Context) = { + issueNormalTypeError(arg, "parameter specified twice: "+ name) + setError(arg) + } + + def PositionalAfterNamedNamesDefaultError(arg: Tree)(implicit context: Context) = { + issueNormalTypeError(arg, "positional after named argument.") + setError(arg) + } + } +} diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index faff4ccab2..d828b019f9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -7,7 +7,7 @@ package scala.tools.nsc package typechecker import symtab.Flags._ -import scala.collection.mutable.ListBuffer +import scala.collection.mutable.{LinkedHashSet, Set} import annotation.tailrec /** @@ -66,8 +66,7 @@ trait Contexts { self: Analyzer => sc.depth += 1 } val c = sc.make(unit, tree, sc.owner, sc.scope, sc.imports) - c.reportAmbiguousErrors = !erasedTypes - c.reportGeneralErrors = !erasedTypes + if (erasedTypes) c.setThrowErrors() else c.setReportErrors() c.implicitsEnabled = !erasedTypes c } @@ -83,7 +82,17 @@ trait Contexts { self: Analyzer => } } + private object Errors { + final val ReportErrors = 1 << 0 + final val BufferErrors = 1 << 1 + final val AmbiguousErrors = 1 << 2 + final val notThrowMask = ReportErrors | BufferErrors + final val AllMask = ReportErrors | BufferErrors | AmbiguousErrors + } + class Context private[typechecker] { + import Errors._ + var unit: CompilationUnit = NoCompilationUnit var tree: Tree = _ // Tree associated with this context var owner: 
Symbol = NoSymbol // The current owner @@ -109,8 +118,6 @@ trait Contexts { self: Analyzer => // (the call to the super or self constructor in the first line of a constructor) // in this context the object's fields should not be in scope - var reportAmbiguousErrors = false - var reportGeneralErrors = false var diagnostic: List[String] = Nil // these messages are printed when issuing an error var implicitsEnabled = false var checking = false @@ -138,12 +145,41 @@ trait Contexts { self: Analyzer => tparams } - def withoutReportingErrors[T](op: => T): T = { - val saved = reportGeneralErrors - reportGeneralErrors = false - try op - finally reportGeneralErrors = saved + private[this] var mode = 0 + private[this] val buffer = LinkedHashSet[AbsTypeError]() + + def errBuffer = buffer + def hasErrors = buffer.nonEmpty + + def state: Int = mode + def restoreState(state0: Int) = mode = state0 + + def reportErrors = (state & ReportErrors) != 0 + def bufferErrors = (state & BufferErrors) != 0 + def ambiguousErrors = (state & AmbiguousErrors) != 0 + def throwErrors = (state & notThrowMask) == 0 + + def setReportErrors() = mode = (ReportErrors | AmbiguousErrors) + def setBufferErrors() = { + assert(bufferErrors || !hasErrors, "When entering the buffer state, context has to be clean. Current buffer: " + buffer) + mode = BufferErrors + } + def setThrowErrors() = mode &= (~AllMask) + def setAmbiguousErrors(report: Boolean) = if (report) mode |= AmbiguousErrors else mode &= notThrowMask + + def updateBuffer(errors: Set[AbsTypeError]) = buffer ++= errors + def condBufferFlush(removeP: AbsTypeError => Boolean) { + val elems = buffer.filter(removeP) + buffer --= elems } + def flushBuffer() { buffer.clear() } + def flushAndReturnBuffer(): Set[AbsTypeError] = { + val current = buffer.clone() + buffer.clear() + current + } + + def logError(err: AbsTypeError) = buffer += err def withImplicitsDisabled[T](op: => T): T = { val saved = implicitsEnabled @@ -183,8 +219,7 @@ trait Contexts { self: Analyzer => c.depth = if (scope == this.scope) this.depth else this.depth + 1 c.imports = imports c.inSelfSuperCall = inSelfSuperCall - c.reportAmbiguousErrors = this.reportAmbiguousErrors - c.reportGeneralErrors = this.reportGeneralErrors + c.restoreState(this.state) c.diagnostic = this.diagnostic c.typingIndentLevel = typingIndentLevel c.implicitsEnabled = this.implicitsEnabled @@ -196,10 +231,10 @@ trait Contexts { self: Analyzer => c } + // TODO: remove? 
Doesn't seem to be used def make(unit: CompilationUnit): Context = { val c = make(unit, EmptyTree, owner, scope, imports) - c.reportAmbiguousErrors = true - c.reportGeneralErrors = true + c.setReportErrors() c.implicitsEnabled = true c } @@ -229,8 +264,8 @@ trait Contexts { self: Analyzer => def makeSilent(reportAmbiguousErrors: Boolean, newtree: Tree = tree): Context = { val c = make(newtree) - c.reportGeneralErrors = false - c.reportAmbiguousErrors = reportAmbiguousErrors + c.setBufferErrors() + c.setAmbiguousErrors(reportAmbiguousErrors) c } @@ -242,13 +277,11 @@ trait Contexts { self: Analyzer => def makeConstructorContext = { var baseContext = enclClass.outer - //todo: find out why we need next line while (baseContext.tree.isInstanceOf[Template]) baseContext = baseContext.outer val argContext = baseContext.makeNewScope(tree, owner) argContext.inSelfSuperCall = true - argContext.reportGeneralErrors = this.reportGeneralErrors - argContext.reportAmbiguousErrors = this.reportAmbiguousErrors + argContext.restoreState(this.state) def enterElems(c: Context) { def enterLocalElems(e: ScopeEntry) { if (e != null && e.owner == c.scope) { @@ -275,41 +308,41 @@ trait Contexts { self: Analyzer => private def unitError(pos: Position, msg: String) = unit.error(pos, if (checking) "\n**** ERROR DURING INTERNAL CHECKING ****\n" + msg else msg) + def issue(err: AbsTypeError) { + if (reportErrors) unitError(err.errPos, addDiagString(err.errMsg)) + else if (bufferErrors) { buffer += err } + else throw new TypeError(err.errPos, err.errMsg) + } + + def issueAmbiguousError(pre: Type, sym1: Symbol, sym2: Symbol, err: AbsTypeError) { + if (ambiguousErrors) { + if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous) + unitError(err.errPos, err.errMsg) + } else if (bufferErrors) { buffer += err } + else throw new TypeError(err.errPos, err.errMsg) + } + + def issueAmbiguousError(err: AbsTypeError) { + if (ambiguousErrors) + unitError(err.errPos, addDiagString(err.errMsg)) + else if (bufferErrors) { buffer += err } + else throw new TypeError(err.errPos, err.errMsg) + } + + // TODO remove def error(pos: Position, err: Throwable) = - if (reportGeneralErrors) unitError(pos, addDiagString(err.getMessage())) + if (reportErrors) unitError(pos, addDiagString(err.getMessage())) else throw err def error(pos: Position, msg: String) = { val msg1 = addDiagString(msg) - if (reportGeneralErrors) unitError(pos, msg1) + if (reportErrors) unitError(pos, msg1) else throw new TypeError(pos, msg1) } - def warning(pos: Position, msg: String) = { - if (reportGeneralErrors) unit.warning(pos, msg) - } - - def ambiguousError(pos: Position, pre: Type, sym1: Symbol, sym2: Symbol, rest: String) { - val (reportPos, msg) = ( - if (sym1.hasDefaultFlag && sym2.hasDefaultFlag && sym1.enclClass == sym2.enclClass) { - val methodName = nme.defaultGetterToMethod(sym1.name) - (sym1.enclClass.pos, - "in "+ sym1.enclClass +", multiple overloaded alternatives of " + methodName + - " define default arguments") - } - else { - (pos, - ("ambiguous reference to overloaded definition,\n" + - "both " + sym1 + sym1.locationString + " of type " + pre.memberType(sym1) + - "\nand " + sym2 + sym2.locationString + " of type " + pre.memberType(sym2) + - "\nmatch " + rest) - ) - } - ) - if (reportAmbiguousErrors) { - if (!pre.isErroneous && !sym1.isErroneous && !sym2.isErroneous) - unit.error(reportPos, msg) - } else throw new TypeError(pos, msg) + def warning(pos: Position, msg: String): Unit = warning(pos, msg, false) + def warning(pos: Position, msg: String, 
force: Boolean) { + if (reportErrors || force) unit.warning(pos, msg) } def isLocal(): Boolean = tree match { @@ -343,8 +376,8 @@ trait Contexts { self: Analyzer => def enclosingContextChain: List[Context] = this :: outer.enclosingContextChain - override def toString = "Context(%s@%s unit=%s scope=%s)".format( - owner.fullName, tree.shortClass, unit, scope.## + override def toString = "Context(%s@%s unit=%s scope=%s errors=%b)".format( + owner.fullName, tree.shortClass, unit, scope.##, hasErrors ) /** Is `sub` a subclass of `base` or a companion object of such a subclass? */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index e14f0bcd87..6cb1d562ce 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -31,20 +31,26 @@ trait Implicits { import typeDebug.{ ptTree, ptBlock, ptLine } import global.typer.{ printTyping, deindentTyping, indentTyping, printInference } + def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = + inferImplicit(tree, pt, reportAmbiguous, isView, context, true) + /** Search for an implicit value. See the comment on `result` at the end of class `ImplicitSearch` * for more info how the search is conducted. - * @param tree The tree for which the implicit needs to be inserted. - * (the inference might instantiate some of the undetermined - * type parameters of that tree. - * @param pt The expected type of the implicit. - * @param reportAmbiguous Should ambiguous implicit errors be reported? - * False iff we search for a view to find out - * whether one type is coercible to another. - * @param isView We are looking for a view - * @param context The current context - * @return A search result + * @param tree The tree for which the implicit needs to be inserted. + * (the inference might instantiate some of the undetermined + * type parameters of that tree. + * @param pt The expected type of the implicit. + * @param reportAmbiguous Should ambiguous implicit errors be reported? + * False iff we search for a view to find out + * whether one type is coercible to another. + * @param isView We are looking for a view + * @param context The current context + * @param saveAmbiguousDivergent False if any divergent/ambiguous errors should be ignored after + * implicits search, + * true if they should be reported (used in further typechecking). 
+ * @return A search result */ - def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context): SearchResult = { + def inferImplicit(tree: Tree, pt: Type, reportAmbiguous: Boolean, isView: Boolean, context: Context, saveAmbiguousDivergent: Boolean): SearchResult = { printInference("[infer %s] %s with pt=%s in %s".format( if (isView) "view" else "implicit", tree, pt, context.owner.enclClass) @@ -64,8 +70,10 @@ trait Implicits { val start = startTimer(implicitNanos) if (printInfers && !tree.isEmpty && !context.undetparams.isEmpty) printTyping("typing implicit: %s %s".format(tree, context.undetparamsString)) - - val result = new ImplicitSearch(tree, pt, isView, context.makeImplicit(reportAmbiguous)).bestImplicit + val implicitSearchContext = context.makeImplicit(reportAmbiguous) + val result = new ImplicitSearch(tree, pt, isView, implicitSearchContext).bestImplicit + if (saveAmbiguousDivergent && implicitSearchContext.hasErrors) + context.updateBuffer(implicitSearchContext.errBuffer.filter(err => err.kind == ErrorKinds.Ambiguous || err.kind == ErrorKinds.Divergent)) printInference("[infer implicit] inferred " + result) context.undetparams = context.undetparams filterNot result.subst.from.contains @@ -244,7 +252,8 @@ trait Implicits { * @param isView We are looking for a view * @param context0 The context used for the implicit search */ - class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context) extends Typer(context0) { + class ImplicitSearch(tree: Tree, pt: Type, isView: Boolean, context0: Context) + extends Typer(context0) with ImplicitsContextErrors { printTyping( ptBlock("new ImplicitSearch", "tree" -> tree, @@ -327,50 +336,6 @@ trait Implicits { incCounter(implicitSearchCount) - /** Issues an error signalling ambiguous implicits */ - private def ambiguousImplicitError(info1: ImplicitInfo, info2: ImplicitInfo, - pre1: String, pre2: String, trailer: String) = - if (!info1.tpe.isErroneous && !info2.tpe.isErroneous) { - val coreMsg = - pre1+" "+info1.sym.fullLocationString+" of type "+info1.tpe+"\n "+ - pre2+" "+info2.sym.fullLocationString+" of type "+info2.tpe+"\n "+ - trailer - error(tree.pos, - if (isView) { - val found = pt.typeArgs(0) - val req = pt.typeArgs(1) - def defaultExplanation = - "Note that implicit conversions are not applicable because they are ambiguous:\n "+ - coreMsg+"are possible conversion functions from "+ found+" to "+req - - def explanation = { - val sym = found.typeSymbol - // Explain some common situations a bit more clearly. - if (AnyRefClass.tpe <:< req) { - if (sym == AnyClass || sym == UnitClass) { - "Note: " + sym.name + " is not implicitly converted to AnyRef. You can safely\n" + - "pattern match `x: AnyRef` or cast `x.asInstanceOf[AnyRef]` to do so." - } - else boxedClass get sym match { - case Some(boxed) => - "Note: an implicit exists from " + sym.fullName + " => " + boxed.fullName + ", but\n" + - "methods inherited from Object are rendered ambiguous. This is to avoid\n" + - "a blanket implicit which would convert any " + sym.fullName + " to any AnyRef.\n" + - "You may wish to use a type ascription: `x: " + boxed.fullName + "`." 
- case _ => - defaultExplanation - } - } - else defaultExplanation - } - - typeErrorMsg(found, req) + "\n" + explanation - } - else { - "ambiguous implicit values:\n "+coreMsg + "match expected type "+pt - }) - } - /** The type parameters to instantiate */ val undetParams = if (isView) List() else context.outer.undetparams @@ -400,9 +365,7 @@ trait Implicits { // println("DivergentImplicit for pt:"+ pt +", open implicits:"+context.openImplicits) //@MDEBUG if (context.openImplicits.tail.isEmpty) { if (!(pt.isErroneous)) - context.unit.error( - tree.pos, "diverging implicit expansion for type "+pt+"\nstarting with "+ - info.sym.fullLocationString) + DivergingImplicitExpansionError(tree, pt, info.sym)(context) SearchFailure } else { throw DivergentImplicit @@ -578,6 +541,9 @@ trait Implicits { else typed1(itree, EXPRmode, wildPt) + if (context.hasErrors) + return fail("typed implicit %s has errors".format(info.sym.fullLocationString)) + incCounter(typedImplicits) printTyping("typed implicit %s:%s, pt=%s".format(itree1, itree1.tpe, wildPt)) @@ -597,8 +563,8 @@ trait Implicits { } } - if (itree2.tpe.isError) - SearchFailure + if (context.hasErrors) + fail("hasMatchingSymbol reported threw error(s)") else if (!hasMatchingSymbol(itree1)) fail("candidate implicit %s is shadowed by other implicit %s".format( info.sym.fullLocationString, itree1.symbol.fullLocationString)) @@ -620,7 +586,9 @@ trait Implicits { false, lubDepth(List(itree2.tpe, pt))) // #2421: check that we correctly instantiated type parameters outside of the implicit tree: - checkBounds(itree2.pos, NoPrefix, NoSymbol, undetParams, targs, "inferred ") + checkBounds(itree2, NoPrefix, NoSymbol, undetParams, targs, "inferred ") + if (context.hasErrors) + return fail("type parameters weren't correctly instantiated outside of the implicit tree") // filter out failures from type inference, don't want to remove them from undetParams! // we must be conservative in leaving type params in undetparams @@ -646,21 +614,29 @@ trait Implicits { // re-typecheck) // TODO: the return tree is ignored. This seems to make // no difference, but it's bad practice regardless. - itree2 match { + + + val checked = itree2 match { case TypeApply(fun, args) => typedTypeApply(itree2, EXPRmode, fun, args) case Apply(TypeApply(fun, args), _) => typedTypeApply(itree2, EXPRmode, fun, args) // t2421c case t => t } - val result = new SearchResult(itree2, subst) - incCounter(foundImplicits) - printInference("[success] found %s for pt %s".format(result, ptInstantiated)) - result + + if (context.hasErrors) + fail("typing TypeApply reported errors for the implicit tree") + else { + val result = new SearchResult(checked, subst) + incCounter(foundImplicits) + printInference("[success] found %s for pt %s".format(result, ptInstantiated)) + result + } } else fail("incompatible: %s does not match expected type %s".format(itree2.tpe, ptInstantiated)) } } catch { - case ex: TypeError => fail(ex.getMessage()) + case ex: TypeError => + fail(ex.getMessage()) } } @@ -794,7 +770,11 @@ trait Implicits { catch divergenceHandler tryImplicitInfo(i) match { - case SearchFailure => rankImplicits(is, acc) + case SearchFailure => + // We don't want errors that occur during checking implicit info + // to influence the check of further infos. 
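
The comment above refers to the condBufferFlush call that follows: between candidate implicits, every buffered error except the divergent-implicit ones is dropped, so one failed candidate's ordinary type errors do not leak into the check of the next. A standalone illustration of that selective flush, mirroring the buffer-and-filter logic added to Context (Err and Kind are sketch-only names):

    object CondFlushSketch {
      object Kind extends Enumeration { val Normal, Ambiguous, Divergent = Value }
      final case class Err(msg: String, kind: Kind.Value)

      def main(args: Array[String]) {
        val buffer = scala.collection.mutable.LinkedHashSet[Err]()
        buffer += Err("candidate does not match expected type", Kind.Normal)
        buffer += Err("diverging implicit expansion", Kind.Divergent)

        // condBufferFlush(_.kind != Divergent): remove everything that is NOT divergent
        val toRemove = buffer.filter(_.kind != Kind.Divergent)
        buffer --= toRemove

        println(buffer.toList) // only the divergent error survives
      }
    }
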
+ context.condBufferFlush(_.kind != ErrorKinds.Divergent) + rankImplicits(is, acc) case newBest => best = newBest val newPending = undoLog undo { @@ -829,7 +809,8 @@ trait Implicits { case chosen :: rest => rest find (alt => !improves(chosen, alt)) match { case Some(competing) => - ambiguousImplicitError(chosen, competing, "both", "and", "") + AmbiguousImplicitError(chosen, competing, "both", "and", "")(isView, pt, tree)(context) + return SearchFailure // Stop the search once ambiguity is encountered, see t4457_2.scala case _ => if (isView) chosen.useCountView += 1 else chosen.useCountArg += 1 @@ -1230,12 +1211,14 @@ trait Implicits { incCounter(inscopeImplicitHits) } if (result == SearchFailure) { + val previousErrs = context.flushAndReturnBuffer() val failstart = startTimer(oftypeFailNanos) val succstart = startTimer(oftypeSucceedNanos) result = implicitManifestOrOfExpectedType(pt) if (result == SearchFailure) { + context.updateBuffer(previousErrs) stopTimer(oftypeFailNanos, failstart) } else { stopTimer(oftypeSucceedNanos, succstart) diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index 9db291a306..eac657da19 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -191,12 +191,14 @@ trait Infer { private val stdErrorValue = stdErrorClass.newErrorValue(nme.ERROR) /** The context-dependent inferencer part */ - class Inferencer(context: Context) { + class Inferencer(context: Context) extends InferencerContextErrors { + import InferErrorGen._ + /* -- Error Messages --------------------------------------------------- */ def setError[T <: Tree](tree: T): T = { def name = newTermName("") - def errorClass = if (context.reportGeneralErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass - def errorValue = if (context.reportGeneralErrors) context.owner.newErrorValue(name) else stdErrorValue + def errorClass = if (context.reportErrors) context.owner.newErrorClass(name.toTypeName) else stdErrorClass + def errorValue = if (context.reportErrors) context.owner.newErrorValue(name) else stdErrorValue def errorSym = if (tree.isType) errorClass else errorValue if (tree.hasSymbol) @@ -205,59 +207,12 @@ trait Infer { tree setType ErrorType } - def error(pos: Position, msg: String) { - context.error(pos, msg) - } - - def errorTree(tree: Tree, msg: String): Tree = { - if (!tree.isErroneous) error(tree.pos, msg) - setError(tree) - } - - def typeError(pos: Position, found: Type, req: Type) { - if (!found.isErroneous && !req.isErroneous) { - error(pos, withAddendum(pos)(typeErrorMsg(found, req))) - - if (settings.explaintypes.value) - explainTypes(found, req) - } - } - - def typeErrorMsg(found: Type, req: Type) = { - def isPossiblyMissingArgs = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req) - def missingArgsMsg = if (isPossiblyMissingArgs) "\n possible cause: missing arguments for method or constructor" else "" - - "type mismatch" + foundReqMsg(found, req) + missingArgsMsg - } - - def typeErrorTree(tree: Tree, found: Type, req: Type): Tree = { - // If the expected type is a refinement type, and the found type is a refinement or an anon - // class, we can greatly improve the error message by retyping the tree to recover the actual - // members present, then display along with the expected members. 
This is done here because - // this is the last point where we still have access to the original tree, rather than just - // the found/req types. - val foundType: Type = req.normalize match { - case RefinedType(parents, decls) if !decls.isEmpty && found.typeSymbol.isAnonOrRefinementClass => - val retyped = typer typed (tree.duplicate setType null) - val foundDecls = retyped.tpe.decls filter (sym => !sym.isConstructor && !sym.isSynthetic) - - if (foundDecls.isEmpty) found - else { - // The members arrive marked private, presumably because there was no - // expected type and so they're considered members of an anon class. - foundDecls foreach (_ resetFlag (PRIVATE | PROTECTED)) - // TODO: if any of the found parents match up with required parents after normalization, - // print the error so that they match. The major beneficiary there would be - // java.lang.Object vs. AnyRef. - refinedType(found.parents, found.typeSymbol.owner, foundDecls, tree.pos) - } - case _ => - found - } - typeError(tree.pos, foundType, req) - setError(tree) - } + def getContext = context + def issue(err: AbsTypeError): Unit = context.issue(err) + + def isPossiblyMissingArgs(found: Type, req: Type) = (found.resultApprox ne found) && isWeaklyCompatible(found.resultApprox, req) + def explainTypes(tp1: Type, tp2: Type) = withDisambiguation(List(), tp1, tp2)(global.explainTypes(tp1, tp2)) @@ -279,7 +234,6 @@ trait Infer { var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG - if (sym1 == NoSymbol && sym.isJavaDefined && context.unit.isJava) // don't try to second guess Java; see #4402 sym1 = sym @@ -289,7 +243,7 @@ trait Infer { Console.println(tree) Console.println("" + pre + " " + sym.owner + " " + context.owner + " " + context.outer.enclClass.owner + " " + sym.owner.thisType + (pre =:= sym.owner.thisType)) } - new AccessError(tree, sym, pre, + ErrorUtils.issueTypeError(AccessError(tree, sym, pre, context.enclClass.owner, if (settings.check.isDefault) analyzer.lastAccessCheckDetails else @@ -303,7 +257,8 @@ trait Infer { "context.owner" -> context.owner, "context.outer.enclClass.owner" -> context.outer.enclClass.owner ) - ) + ))(context) + setError(tree) } else { if (sym1.isTerm) @@ -316,10 +271,11 @@ trait Infer { if (settings.debug.value) ex.printStackTrace val sym2 = underlyingSymbol(sym1) val itype = pre.memberType(sym2) - new AccessError(tree, sym, pre, - "\n because its instance type "+itype+ - (if ("malformed type: "+itype.toString==ex.msg) " is malformed" - else " contains a "+ex.msg)).emit() + ErrorUtils.issueTypeError( + AccessError(tree, sym, pre, context.enclClass.owner, + "\n because its instance type "+itype+ + (if ("malformed type: "+itype.toString==ex.msg) " is malformed" + else " contains a "+ex.msg)))(context) ErrorType } } @@ -762,25 +718,20 @@ trait Infer { false } - /** Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors). + /** + * Todo: Try to make isApplicable always safe (i.e. not cause TypeErrors). 
+ * The chance of TypeErrors should be reduced through context errors */ private[typechecker] def isApplicableSafe(undetparams: List[Symbol], ftpe: Type, argtpes0: List[Type], pt: Type): Boolean = { - val reportAmbiguousErrors = context.reportAmbiguousErrors - context.reportAmbiguousErrors = false - try { - isApplicable(undetparams, ftpe, argtpes0, pt) - } catch { - case ex: TypeError => - try { - isApplicable(undetparams, ftpe, argtpes0, WildcardType) - } catch { - case ex: TypeError => - false - } - } finally { - context.reportAmbiguousErrors = reportAmbiguousErrors - } + val silentContext = context.makeSilent(false) + val typer0 = newTyper(silentContext) + val res1 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, pt) + if (pt != WildcardType && silentContext.hasErrors) { + silentContext.flushBuffer() + val res2 = typer0.infer.isApplicable(undetparams, ftpe, argtpes0, WildcardType) + if (silentContext.hasErrors) false else res2 + } else res1 } /** Is type ftpe1 strictly more specific than type ftpe2 @@ -942,38 +893,22 @@ trait Infer { */ /** error if arguments not within bounds. */ - def checkBounds(pos: Position, pre: Type, owner: Symbol, - tparams: List[Symbol], targs: List[Type], prefix: String) = { + def checkBounds(tree: Tree, pre: Type, owner: Symbol, + tparams: List[Symbol], targs: List[Type], prefix: String): Boolean = { //@M validate variances & bounds of targs wrt variances & bounds of tparams //@M TODO: better place to check this? //@M TODO: errors for getters & setters are reported separately val kindErrors = checkKindBounds(tparams, targs, pre, owner) - if (!kindErrors.isEmpty) { - if (targs contains WildcardType) () - else error(pos, - prefix + "kinds of the type arguments " + targs.mkString("(", ",", ")") + - " do not conform to the expected kinds of the type parameters "+ - tparams.mkString("(", ",", ")") + tparams.head.locationString+ "." 
+ - kindErrors.toList.mkString("\n", ", ", "")) - } - else if (!isWithinBounds(pre, owner, tparams, targs)) { + if(!kindErrors.isEmpty) { + if (targs contains WildcardType) true + else { KindBoundErrors(tree, prefix, targs, tparams, kindErrors); false } + } else if (!isWithinBounds(pre, owner, tparams, targs)) { if (!(targs exists (_.isErroneous)) && !(tparams exists (_.isErroneous))) { - //val bounds = instantiatedBounds(pre, owner, tparams, targs)//DEBUG - //println("bounds = "+bounds+", targs = "+targs+", targclasses = "+(targs map (_.getClass))+", parents = "+(targs map (_.parents))) - //println(List.map2(bounds, targs)((bound, targ) => bound containsType targ)) - error(pos, - prefix + "type arguments " + targs.mkString("[", ",", "]") + - " do not conform to " + tparams.head.owner + "'s type parameter bounds " + - (tparams map (_.defString)).mkString("[", ",", "]")) - if (settings.explaintypes.value) { - val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, targs).bounds) - (targs, bounds).zipped foreach ((targ, bound) => explainTypes(bound.lo, targ)) - (targs, bounds).zipped foreach ((targ, bound) => explainTypes(targ, bound.hi)) - () - } - } - } + NotWithinBounds(tree, prefix, targs, tparams, kindErrors) + false + } else true + } else true } def checkKindBounds(tparams: List[Symbol], targs: List[Type], pre: Type, owner: Symbol): List[String] = { @@ -1055,8 +990,7 @@ trait Infer { targs: List[Type], pt: Type) { if (targs eq null) { if (!tree.tpe.isErroneous && !pt.isErroneous) - error(tree.pos, "polymorphic expression cannot be instantiated to expected type" + - foundReqMsg(polyType(undetparams, skipImplicit(tree.tpe)), pt)) + PolymorphicExpressionInstantiationError(tree, undetparams, pt) } else { new TreeTypeSubstituter(undetparams, targs).traverse(tree) } @@ -1092,27 +1026,24 @@ trait Infer { (okparams map (_.name), okargs).zipped.map(_ + "=" + _).mkString("solved: ", ", ", "") )) - checkBounds(fn.pos, NoPrefix, NoSymbol, undetparams, allargs, "inferred ") - val treeSubst = new TreeTypeSubstituter(okparams, okargs) - treeSubst traverseTrees fn :: args + if (checkBounds(fn, NoPrefix, NoSymbol, undetparams, allargs, "inferred ")) { + val treeSubst = new TreeTypeSubstituter(okparams, okargs) + treeSubst traverseTrees fn :: args - leftUndet match { - case Nil => Nil - case xs => - // #3890 - val xs1 = treeSubst.typeMap mapOver xs - if (xs ne xs1) - new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args + leftUndet match { + case Nil => Nil + case xs => + // #3890 + val xs1 = treeSubst.typeMap mapOver xs + if (xs ne xs1) + new TreeSymSubstTraverser(xs, xs1) traverseTrees fn :: args - xs1 - } + xs1 + } + } else Nil } catch ifNoInstance { msg => - errorTree(fn, "no type parameters for " + - applyErrorMsg(fn, " exist so that it can be applied to arguments ", args map (_.tpe.widen), WildcardType) + - "\n --- because ---\n" + msg - ) - Nil + NoMethodInstanceError(fn, args, msg); List() } } @@ -1137,20 +1068,16 @@ trait Infer { try { val targs = solvedTypes(tvars, undetparams, undetparams map varianceInType(restpe), true, lubDepth(List(restpe, pt))) -// checkBounds(tree.pos, NoPrefix, NoSymbol, undetparams, targs, "inferred ") +// checkBounds(tree, NoPrefix, NoSymbol, undetparams, targs, "inferred ") // no checkBounds here. If we enable it, test bug602 fails. 
new TreeTypeSubstituter(undetparams, targs).traverse(tree) - } catch { - case ex: NoInstance => - errorTree(tree, "constructor of type " + restpe + - " cannot be uniquely instantiated to expected type " + pt + - "\n --- because ---\n" + ex.getMessage()) + } catch ifNoInstance{ msg => + NoConstructorInstanceError(tree, restpe, pt, msg) } def instError = { if (settings.debug.value) Console.println("ici " + tree + " " + undetparams + " " + pt) if (settings.explaintypes.value) explainTypes(restpe.instantiateTypeParams(undetparams, tvars), pt) - errorTree(tree, "constructor cannot be instantiated to expected type" + - foundReqMsg(restpe, pt)) + ConstrInstantiationError(tree, restpe, pt) } if (restpe.instantiateTypeParams(undetparams, tvars) <:< pt) { computeArgs @@ -1220,9 +1147,9 @@ trait Infer { } } - def checkCheckable(pos: Position, tp: Type, kind: String) { + def checkCheckable(tree: Tree, tp: Type, kind: String) { def patternWarning(tp0: Type, prefix: String) = { - context.unit.uncheckedWarning(pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure") + context.unit.uncheckedWarning(tree.pos, prefix+tp0+" in type "+kind+tp+" is unchecked since it is eliminated by erasure") } def check(tp: Type, bound: List[Symbol]) { def isLocalBinding(sym: Symbol) = @@ -1241,7 +1168,7 @@ trait Infer { } else if (sym.isAliasType) { check(tp.normalize, bound) } else if (sym == NothingClass || sym == NullClass || sym == AnyValClass) { - error(pos, "type "+tp+" cannot be used in a type pattern or isInstanceOf test") + TypePatternOrIsInstanceTestError(tree, tp) } else { for (arg <- args) { if (sym == ArrayClass) check(arg, bound) @@ -1265,11 +1192,12 @@ trait Infer { case ExistentialType(quantified, tp1) => check(tp1, bound ::: quantified) case ThisType(_) => - ; + () case NoPrefix => - ; + () case _ => patternWarning(tp, "type ") + () } } check(tp, List()) @@ -1292,7 +1220,7 @@ trait Infer { } } - def inferTypedPattern(pos: Position, pattp: Type, pt0: Type): Type = { + def inferTypedPattern(tree0: Tree, pattp: Type, pt0: Type): Type = { val pt = widen(pt0) val ptparams = freeTypeParamsOfTerms.collect(pt) val tpparams = freeTypeParamsOfTerms.collect(pattp) @@ -1304,10 +1232,12 @@ trait Infer { * This is the case if the scrutinee has no unresolved type arguments * and is a "final type", meaning final + invariant in all type parameters. 
*/ - if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) - error(pos, "scrutinee is incompatible with pattern type" + foundReqMsg(pattp, pt)) + if (pt.isFinalType && ptparams.isEmpty && !ptMatchesPattp) { + IncompatibleScrutineeTypeError(tree0, pattp, pt) + return ErrorType + } - checkCheckable(pos, pattp, "pattern ") + checkCheckable(tree0, pattp, "pattern ") if (pattp <:< pt) () else { debuglog("free type params (1) = " + tpparams) @@ -1330,8 +1260,8 @@ trait Infer { if (isPopulated(tp, pt1) && isInstantiatable(tvars ++ ptvars) || pattpMatchesPt) ptvars foreach instantiateTypeVar else { - error(pos, "pattern type is incompatible with expected type" + foundReqMsg(pattp, pt)) - return pattp + PatternTypeIncompatibleWithPtError1(tree0, pattp, pt) + return ErrorType } } tvars foreach instantiateTypeVar @@ -1352,30 +1282,8 @@ trait Infer { val pt1 = pt.instantiateTypeParams(ptparams, ptvars) if (pat.tpe <:< pt1) ptvars foreach instantiateTypeVar - else { - val sym = pat.tpe.typeSymbol - val clazz = sym.companionClass - val addendum = ( - if (sym.isModuleClass && clazz.isCaseClass && (clazz isSubClass pt1.typeSymbol)) { - // TODO: move these somewhere reusable. - val typeString = clazz.typeParams match { - case Nil => "" + clazz.name - case xs => xs map (_ => "_") mkString (clazz.name + "[", ",", "]") - } - val caseString = ( - clazz.caseFieldAccessors - map (_ => "_") // could use the actual param names here - mkString (clazz.name + "(", ",", ")") - ) - ( - "\nNote: if you intended to match against the class, try `case _: " + - typeString + "` or `case " + caseString + "`" - ) - } - else "" - ) - error(pat.pos, "pattern type is incompatible with expected type"+foundReqMsg(pat.tpe, pt) + addendum) - } + else + PatternTypeIncompatibleWithPtError2(pat, pt1, pt) } object toOrigin extends TypeMap { @@ -1452,7 +1360,7 @@ trait Infer { * If several alternatives match `pt`, take parameterless one. * If no alternative matches `pt`, take the parameterless one anyway. */ - def inferExprAlternative(tree: Tree, pt: Type): Unit = tree.tpe match { + def inferExprAlternative(tree: Tree, pt: Type) = tree.tpe match { case OverloadedType(pre, alts) => tryTwice { val alts0 = alts filter (alt => isWeaklyCompatible(pre.memberType(alt), pt)) val secondTry = alts0.isEmpty @@ -1483,15 +1391,10 @@ trait Infer { case _ => } } - typeErrorTree(tree, tree.symbol.tpe, pt) + NoBestExprAlternativeError(tree, pt) } else if (!competing.isEmpty) { - if (secondTry) { - typeErrorTree(tree, tree.symbol.tpe, pt) - } else { - if (!pt.isErroneous) - context.ambiguousError(tree.pos, pre, best, competing.head, "expected type " + pt) - setError(tree) - } + if (secondTry) NoBestExprAlternativeError(tree, pt) + else { if (!pt.isErroneous) AmbiguousExprAlternativeError(tree, pre, best, competing.head, pt) } } else { // val applicable = alts1 filter (alt => // global.typer.infer.isWeaklyCompatible(pre.memberType(alt), pt)) @@ -1501,9 +1404,11 @@ trait Infer { } } - @inline private def wrapTypeError(expr: => Boolean): Boolean = - try expr - catch { case _: TypeError => false } + @inline private def inSilentMode(expr: Typer => Boolean): Boolean = { + val silentContext = context.makeSilent(context.ambiguousErrors) + val res = expr(newTyper(silentContext)) + if (silentContext.hasErrors) false else res + } // Checks against the name of the parameter and also any @deprecatedName. 
private def paramMatchesName(param: Symbol, name: Name) = @@ -1573,9 +1478,7 @@ trait Infer { val applicable = resolveOverloadedMethod(argtpes, { alts filter { alt => - // TODO: this will need to be re-written once we substitute throwing exceptions - // with generating error trees. We wrap this applicability in try/catch because of #4457. - wrapTypeError(isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) && + inSilentMode(typer0 => typer0.infer.isApplicable(undetparams, followApply(pre.memberType(alt)), argtpes, pt)) && (!varArgsOnly || isVarArgsList(alt.tpe.params)) } }) @@ -1591,16 +1494,13 @@ trait Infer { if (improves(alt, best)) alt else best) val competing = applicable.dropWhile(alt => best == alt || improves(best, alt)) if (best == NoSymbol) { - if (pt == WildcardType) { - errorTree(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) - } else { + if (pt == WildcardType) + NoBestMethodAlternativeError(tree, argtpes, pt) + else inferMethodAlternative(tree, undetparams, argtpes, WildcardType) - } } else if (!competing.isEmpty) { if (!(argtpes exists (_.isErroneous)) && !pt.isErroneous) - context.ambiguousError(tree.pos, pre, best, competing.head, - "argument types " + argtpes.mkString("(", ",", ")") + - (if (pt == WildcardType) "" else " and expected result type " + pt)) + AmbiguousMethodAlternativeError(tree, pre, best, competing.head, argtpes, pt) setError(tree) () } else { @@ -1616,18 +1516,27 @@ trait Infer { * * @param infer ... */ - def tryTwice(infer: => Unit) { + def tryTwice(infer: => Unit): Unit = { if (context.implicitsEnabled) { - val reportGeneralErrors = context.reportGeneralErrors - context.reportGeneralErrors = false - try context.withImplicitsDisabled(infer) - catch { - case ex: CyclicReference => throw ex - case ex: TypeError => - context.reportGeneralErrors = reportGeneralErrors + val saved = context.state + var fallback = false + context.setBufferErrors() + val res = try { + context.withImplicitsDisabled(infer) + if (context.hasErrors) { + fallback = true + context.restoreState(saved) + context.flushBuffer() infer + } + } catch { + case ex: CyclicReference => throw ex + case ex: TypeError => // recoverable cyclic references + context.restoreState(saved) + if (!fallback) infer else () } - context.reportGeneralErrors = reportGeneralErrors + context.restoreState(saved) + res } else infer } @@ -1642,13 +1551,13 @@ trait Infer { def inferPolyAlternatives(tree: Tree, argtypes: List[Type]): Unit = { val OverloadedType(pre, alts) = tree.tpe val sym0 = tree.symbol filter (alt => sameLength(alt.typeParams, argtypes)) - def fail(msg: String): Unit = error(tree.pos, msg) + def fail(kind: PolyAlternativeErrorKind.ErrorType) = + PolyAlternativeError(tree, argtypes, sym0, kind) - if (sym0 == NoSymbol) return fail( + if (sym0 == NoSymbol) return ( if (alts exists (_.typeParams.nonEmpty)) - "wrong number of type parameters for " + treeSymTypeMsg(tree) - else treeSymTypeMsg(tree) + " does not take type parameters" - ) + fail(PolyAlternativeErrorKind.WrongNumber) + else fail(PolyAlternativeErrorKind.NoParams)) val (resSym, resTpe) = { if (!sym0.isOverloaded) @@ -1656,11 +1565,8 @@ trait Infer { else { val sym = sym0 filter (alt => isWithinBounds(pre, alt.owner, alt.typeParams, argtypes)) if (sym == NoSymbol) { - if (argtypes forall (x => !x.isErroneous)) fail( - "type arguments " + argtypes.mkString("[", ",", "]") + - " conform to the bounds of none of the overloaded alternatives of\n "+sym0+ - ": "+sym0.info - ) + if (argtypes forall (x => 
!x.isErroneous)) + fail(PolyAlternativeErrorKind.ArgsDoNotConform) return } else if (sym.isOverloaded) { @@ -1677,24 +1583,6 @@ trait Infer { // Side effects tree with symbol and type tree setSymbol resSym setType resTpe } - - abstract class TreeForwarder(forwardTo: Tree) extends Tree { - override def pos = forwardTo.pos - override def hasSymbol = forwardTo.hasSymbol - override def symbol = forwardTo.symbol - override def symbol_=(x: Symbol) = forwardTo.symbol = x - } - - case class AccessError(tree: Tree, sym: Symbol, pre: Type, explanation: String) extends TreeForwarder(tree) { - setError(this) - - // @PP: It is improbable this logic shouldn't be in use elsewhere as well. - private def location = if (sym.isClassConstructor) context.enclClass.owner else pre.widen - def emit(): Tree = { - val realsym = underlyingSymbol(sym) - errorTree(tree, realsym.fullLocationString + " cannot be accessed in " + location + explanation) - } - } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 7f9e56a926..c63ae90ef6 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -97,8 +97,6 @@ trait Macros { self: Analyzer => override def defaultReflectiveClassLoader() = libraryClassLoader } - class MacroExpandError(val msg: String) extends Exception(msg) - /** Return optionally address of companion object and implementation method symbol * of given macro; or None if implementation classfile cannot be loaded or does * not contain the macro implementation. @@ -127,7 +125,7 @@ trait Macros { self: Analyzer => * Or, if that fails, and the macro overrides a method return * tree that calls this method instead of the macro. 
*/ - def macroExpand(tree: Tree): Any = { + def macroExpand(tree: Tree, context: Context): Option[Any] = { val macroDef = tree.symbol macroImpl(macroDef) match { case Some((receiver, rmeth)) => @@ -139,41 +137,55 @@ trait Macros { self: Analyzer => } val rawArgs: Seq[Any] = rawArgss.flatten try { - mirror.invoke(receiver, rmeth, rawArgs: _*) + Some(mirror.invoke(receiver, rmeth, rawArgs: _*)) } catch { case ex => val realex = ReflectionUtils.unwrapThrowable(ex) val stacktrace = new java.io.StringWriter() realex.printStackTrace(new java.io.PrintWriter(stacktrace)) val msg = System.getProperty("line.separator") + stacktrace - throw new MacroExpandError("exception during macro expansion: " + msg) + context.unit.error(tree.pos, "exception during macro expansion: " + msg) + None } case None => val trace = scala.tools.nsc.util.trace when settings.debug.value - def notFound() = throw new MacroExpandError("macro implementation not found: " + macroDef.name) - def fallBackToOverridden(tree: Tree): Tree = { + def notFound() = { + context.unit.error(tree.pos, "macro implementation not found: " + macroDef.name) + None + } + def fallBackToOverridden(tree: Tree): Option[Tree] = { tree match { case Select(qual, name) if (macroDef.isMacro) => macroDef.allOverriddenSymbols match { - case first :: others => - return Select(qual, name) setPos tree.pos setSymbol first + case first :: _ => + Some(Select(qual, name) setPos tree.pos setSymbol first) case _ => trace("macro is not overridden: ")(tree) notFound() } case Apply(fn, args) => - Apply(fallBackToOverridden(fn), args) setPos tree.pos + fallBackToOverridden(fn) match { + case Some(fn1) => Some(Apply(fn1, args) setPos tree.pos) + case _ => None + } case TypeApply(fn, args) => - TypeApply(fallBackToOverridden(fn), args) setPos tree.pos + fallBackToOverridden(fn) match { + case Some(fn1) => Some(TypeApply(fn1, args) setPos tree.pos) + case _ => None + } case _ => trace("unexpected tree in fallback: ")(tree) notFound() } } - val tree1 = fallBackToOverridden(tree) - trace("falling back to ")(tree1) - currentRun.macroExpansionFailed = true - tree1 + fallBackToOverridden(tree) match { + case Some(tree1) => + trace("falling back to ")(tree1) + currentRun.macroExpansionFailed = true + Some(tree1) + case None => + None + } } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index 29dffd99d6..c6ca9870c3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -32,10 +32,12 @@ trait MethodSynthesis { trait MethodSynth { self: Namer => + import NamerErrorGen._ + def enterGetterSetter(tree: ValDef) { val ValDef(mods, name, _, _) = tree if (nme.isSetterName(name)) - context.error(tree.pos, "Names of vals or vars may not end in `_='") + ValOrValWithSetterSuffixError(tree) val getter = Getter(tree).createAndEnterSymbol() @@ -43,7 +45,7 @@ trait MethodSynthesis { if (mods.isLazy) enterLazyVal(tree, getter) else { if (mods.isPrivateLocal) - context.error(tree.pos, "private[this] not allowed for case class parameters") + PrivateThisCaseClassParameterError(tree) // Create the setter if necessary. 
if (mods.isMutable) Setter(tree).createAndEnterSymbol() @@ -187,7 +189,7 @@ trait MethodSynthesis { override def validate() { assert(derivedSym != NoSymbol, tree) if (derivedSym.isOverloaded) - context.error(derivedSym.pos, derivedSym+" is defined twice") + GetterDefinedTwiceError(derivedSym) super.validate() } @@ -255,8 +257,7 @@ trait MethodSynthesis { if (derivedSym == NoSymbol) { // the namer decides whether to generate these symbols or not. at that point, we don't // have symbolic information yet, so we only look for annotations named "BeanProperty". - context.error(tree.pos, - "implementation limitation: the BeanProperty annotation cannot be used in a type alias or renamed import") + BeanPropertyAnnotationLimitationError(tree) } super.validate() } @@ -304,9 +305,9 @@ trait MethodSynthesis { val beans = beanAccessorsFromNames(tree) if (beans.nonEmpty) { if (!name(0).isLetter) - context.error(tree.pos, "`BeanProperty' annotation can be applied only to fields that start with a letter") + BeanPropertyAnnotationFieldWithoutLetterError(tree) else if (mods.isPrivate) // avoids name clashes with private fields in traits - context.error(tree.pos, "`BeanProperty' annotation can be applied only to non-private fields") + BeanPropertyAnnotationPrivateFieldError(tree) // Create and enter the symbols here, add the trees in finishGetterSetter. beans foreach (_.createAndEnterSymbol()) diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index e04d89047b..701c69a4bb 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -73,7 +73,9 @@ trait Namers extends MethodSynthesis { classAndNamerOfModule.clear() } - abstract class Namer(val context: Context) extends MethodSynth { + abstract class Namer(val context: Context) extends MethodSynth with NamerContextErrors { + + import NamerErrorGen._ val typer = newTyper(context) private lazy val innerNamer = @@ -109,9 +111,10 @@ trait Namers extends MethodSynthesis { protected def owner = context.owner private def contextFile = context.unit.source.file - private def typeErrorHandler[T](pos: Position, alt: T): PartialFunction[Throwable, T] = { + private def typeErrorHandler[T](tree: Tree, alt: T): PartialFunction[Throwable, T] = { case ex: TypeError => - typer.reportTypeError(pos, ex) + // H@ need to ensure that we handle only cyclic references + TypeSigError(tree, ex) alt } // PRIVATE | LOCAL are fields generated for primary constructor arguments @@ -129,10 +132,17 @@ trait Namers extends MethodSynthesis { || vd.symbol.isLazy ) - def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym = { + def setPrivateWithin[Sym <: Symbol](tree: Tree, sym: Sym, mods: Modifiers): Sym = if (sym.isPrivateLocal || !mods.hasAccessBoundary) sym - else sym setPrivateWithin typer.qualifyingClass(tree, mods.privateWithin, true) - } + else sym setPrivateWithin ( + typer.qualifyingClass(tree, mods.privateWithin, true) match { + case None => + NoSymbol + case Some(sym) => + sym + } + ) + def setPrivateWithin(tree: MemberDef, sym: Symbol): Symbol = setPrivateWithin(tree, sym, tree.mods) @@ -195,14 +205,6 @@ trait Namers extends MethodSynthesis { ) ) - private def doubleDefError(pos: Position, sym: Symbol) { - val s1 = if (sym.isModule) "case class companion " else "" - val s2 = if (sym.isSynthetic) "(compiler-generated) " + s1 else "" - val s3 = if (sym.isCase) "case class " + sym.name else "" + sym - - context.error(pos, 
sym.name + " is already defined as " + s2 + s3) - } - private def allowsOverload(sym: Symbol) = ( sym.isSourceMethod && sym.owner.isClass && !sym.owner.isPackageClass ) @@ -221,7 +223,7 @@ trait Namers extends MethodSynthesis { if (!allowsOverload(sym)) { val prev = scope.lookupEntry(sym.name) if ((prev ne null) && prev.owner == scope && conflict(sym, prev.sym)) { - doubleDefError(sym.pos, prev.sym) + DoubleDefError(sym, prev.sym) sym setInfo ErrorType scope unlink prev.sym // let them co-exist... // FIXME: The comment "let them co-exist" is confusing given that the @@ -250,7 +252,7 @@ trait Namers extends MethodSynthesis { returnContext } tree.symbol match { - case NoSymbol => try dispatch() catch typeErrorHandler(tree.pos, this.context) + case NoSymbol => try dispatch() catch typeErrorHandler(tree, this.context) case sym => enterExistingSym(sym) } } @@ -447,6 +449,7 @@ trait Namers extends MethodSynthesis { } private def checkSelectors(tree: Import): Unit = { + import DuplicatesErrorKinds._ val Import(expr, selectors) = tree val base = expr.tpe @@ -483,8 +486,10 @@ trait Namers extends MethodSynthesis { typeSig(tree) } // for Java code importing Scala objects - else if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) - notAMemberError(tree.pos, expr, from) + else if (!nme.isModuleName(from) || isValid(nme.stripModuleSuffix(from))) { + typer.TyperErrorGen.NotAMemberError(tree, expr, from) + typer.infer.setError(tree) + } } // Setting the position at the import means that if there is // more than one hidden name, the second will not be warned. @@ -492,20 +497,21 @@ trait Namers extends MethodSynthesis { checkNotRedundant(tree.pos withPoint fromPos, from, to) } } - def noDuplicates(names: List[Name], message: String) { + + def noDuplicates(names: List[Name], check: DuplicatesErrorKinds.Value) { def loop(xs: List[Name]): Unit = xs match { case Nil => () case hd :: tl => if (hd == nme.WILDCARD || !(tl contains hd)) loop(tl) - else context.error(tree.pos, hd.decode + " " + message) + else DuplicatesError(tree, hd, check) } loop(names filterNot (x => x == null || x == nme.WILDCARD)) } selectors foreach checkSelector // checks on the whole set - noDuplicates(selectors map (_.name), "is renamed twice") - noDuplicates(selectors map (_.rename), "appears twice as a target of a renaming") + noDuplicates(selectors map (_.name), RenamedTwice) + noDuplicates(selectors map (_.rename), AppearsTwice) } def enterCopyMethodOrGetter(tree: Tree, tparams: List[TypeDef]): Symbol = { @@ -620,7 +626,7 @@ trait Namers extends MethodSynthesis { if (mods.isCase) { if (treeInfo.firstConstructorArgs(impl.body).size > MaxFunctionArity) - context.error(tree.pos, "Implementation restriction: case classes cannot have more than " + MaxFunctionArity + " parameters.") + MaxParametersCaseClassError(tree) val m = ensureCompanionObject(tree, caseModuleDef) classOfModuleClass(m.moduleClass) = new WeakReference(tree) @@ -823,7 +829,7 @@ trait Namers extends MethodSynthesis { val tp = tpt.tpe val inheritsSelf = tp.typeSymbol == owner if (inheritsSelf) - context.error(tpt.pos, ""+tp.typeSymbol+" inherits itself") + InheritsItselfError(tpt) if (inheritsSelf || tp.isError) AnyRefClass.tpe else tp @@ -918,7 +924,7 @@ trait Namers extends MethodSynthesis { } def thisMethodType(restpe: Type) = { - val checkDependencies = new DependentTypeChecker(context) + val checkDependencies = new DependentTypeChecker(context)(this) checkDependencies check vparamSymss // DEPMETTODO: check not needed when they become on by default 
checkDependencies(restpe) @@ -994,7 +1000,7 @@ trait Namers extends MethodSynthesis { } mforeach(vparamss) { vparam => if (vparam.tpt.isEmpty) { - context.error(vparam.pos, "missing parameter type") + MissingParameterOrValTypeError(vparam) vparam.tpt defineType ErrorType } } @@ -1262,7 +1268,7 @@ trait Namers extends MethodSynthesis { val typer1 = typer.constrTyperIf(isBeforeSupercall) if (tpt.isEmpty) { if (rhs.isEmpty) { - context.error(tpt.pos, "missing parameter type"); + MissingParameterOrValTypeError(tpt) ErrorType } else assignTypeToTree(vdef, newTyper(typer1.context.make(vdef, sym)), WildcardType) @@ -1276,7 +1282,7 @@ trait Namers extends MethodSynthesis { val expr1 = typer.typedQualifier(expr) typer checkStable expr1 if (expr1.symbol != null && expr1.symbol.isRootPackage) - context.error(tree.pos, "_root_ cannot be imported") + RootImportError(tree) val newImport = treeCopy.Import(tree, expr1, selectors).asInstanceOf[Import] checkSelectors(newImport) @@ -1290,7 +1296,7 @@ trait Namers extends MethodSynthesis { val result = try getSig - catch typeErrorHandler(tree.pos, ErrorType) + catch typeErrorHandler(tree, ErrorType) result match { case PolyType(tparams @ (tp :: _), _) if tp.owner.isTerm => typer.deskolemizeTypeParams(tparams)(result) @@ -1337,43 +1343,43 @@ trait Namers extends MethodSynthesis { * - declarations only in mixins or abstract classes (when not @native) */ def validate(sym: Symbol) { - def fail(msg: String) = context.error(sym.pos, msg) + import SymValidateErrors._ + def fail(kind: SymValidateErrors.Value) = SymbolValidationError(sym, kind) + def checkWithDeferred(flag: Int) { if (sym hasFlag flag) - fail("abstract member may not have " + flagsToString(flag) + " modifier") + AbstractMemberWithModiferError(sym, flag) } def checkNoConflict(flag1: Int, flag2: Int) { if (sym hasAllFlags flag1 | flag2) - fail("illegal combination of modifiers: %s and %s for: %s".format( - flagsToString(flag1), flagsToString(flag2), sym)) + IllegalModifierCombination(sym, flag1, flag2) } if (sym.isImplicit) { if (sym.isConstructor) - fail("`implicit' modifier not allowed for constructors") + fail(ImplicitConstr) if (!sym.isTerm) - fail("`implicit' modifier can be used only for values, variables and methods") + fail(ImplicitNotTerm) if (sym.owner.isPackageClass) - fail("`implicit' modifier cannot be used for top-level objects") + fail(ImplicitTopObject) } if (sym.isClass) { if (sym.isAnyOverride && !sym.hasFlag(TRAIT)) - fail("`override' modifier not allowed for classes") - } - else { + fail(OverrideClass) + } else { if (sym.isSealed) - fail("`sealed' modifier can be used only for classes") + fail(SealedNonClass) if (sym.hasFlag(ABSTRACT)) - fail("`abstract' modifier can be used only for classes; it should be omitted for abstract members") + fail(AbstractNonClass) } if (sym.isConstructor && sym.isAnyOverride) - fail("`override' modifier not allowed for constructors") + fail(OverrideConstr) if (sym.isAbstractOverride && !sym.owner.isTrait) - fail("`abstract override' modifier only allowed for members of traits") + fail(AbstractOverride) if (sym.isLazy && sym.hasFlag(PRESUPER)) - fail("`lazy' definitions may not be initialized early") + fail(LazyAndEarlyInit) if (sym.info.typeSymbol == FunctionClass(0) && sym.isValueParameter && sym.owner.isCaseClass) - fail("pass-by-name arguments not allowed for case class parameters") + fail(ByNameParameter) if (sym.isDeferred) { // Is this symbol type always allowed the deferred flag? 
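
The Namer.validate hunk above swaps free-form context.error(sym.pos, msg) calls for fail(ImplicitConstr), fail(SealedNonClass) and friends, delegating the message text to SymbolValidationError. The standalone sketch below only illustrates that shape: the SymInfo type, the report callback and the message renderer are invented for the example, while the kind names and message strings are taken from the hunk itself.

    // Toy model of kind-based validation errors; not the compiler's ContextErrors API.
    object SymValidationSketch {
      object SymValidateErrors extends Enumeration {
        val ImplicitConstr, SealedNonClass, OverrideConstr, LazyAndEarlyInit = Value
      }
      import SymValidateErrors._

      final case class SymInfo(name: String, isImplicit: Boolean, isConstructor: Boolean,
                               isSealed: Boolean, isClass: Boolean, isLazy: Boolean, isEarlyInit: Boolean)

      // One renderer owns the message text; validate() only reports kinds.
      def message(kind: SymValidateErrors.Value): String = kind match {
        case ImplicitConstr   => "`implicit' modifier not allowed for constructors"
        case SealedNonClass   => "`sealed' modifier can be used only for classes"
        case OverrideConstr   => "`override' modifier not allowed for constructors"
        case LazyAndEarlyInit => "`lazy' definitions may not be initialized early"
      }

      def validate(sym: SymInfo, report: String => Unit): Unit = {
        def fail(kind: SymValidateErrors.Value): Unit = report(message(kind))
        if (sym.isImplicit && sym.isConstructor) fail(ImplicitConstr)
        if (sym.isSealed && !sym.isClass)        fail(SealedNonClass)
        if (sym.isLazy && sym.isEarlyInit)       fail(LazyAndEarlyInit)
      }

      def main(args: Array[String]): Unit = {
        val sealedVal = SymInfo("x", isImplicit = false, isConstructor = false,
                                isSealed = true, isClass = false, isLazy = false, isEarlyInit = false)
        validate(sealedVal, msg => println("error: " + msg)) // prints the `sealed' message
      }
    }
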
@@ -1391,7 +1397,7 @@ trait Namers extends MethodSynthesis { if (sym hasAnnotation NativeAttr) sym resetFlag DEFERRED else if (!symbolAllowsDeferred && ownerRequiresConcrete) - fail("only classes can have declared but undefined members" + abstractVarMessage(sym)) + fail(AbstractVar) checkWithDeferred(PRIVATE) checkWithDeferred(FINAL) @@ -1456,14 +1462,14 @@ trait Namers extends MethodSynthesis { // def foo[T, T2](a: T, x: T2)(implicit w: ComputeT2[T, T2]) // moreover, the latter is not an encoding of the former, which hides type // inference of T2, so you can specify T while T2 is purely computed - private class DependentTypeChecker(ctx: Context) extends TypeTraverser { + private class DependentTypeChecker(ctx: Context)(namer: Namer) extends TypeTraverser { private[this] val okParams = mutable.Set[Symbol]() private[this] val method = ctx.owner def traverse(tp: Type) = tp match { case SingleType(_, sym) => if (sym.owner == method && sym.isValueParameter && !okParams(sym)) - ctx.error(sym.pos, "illegal dependent method type" + errorAddendum) + namer.NamerErrorGen.IllegalDependentMethTpeError(sym)(ctx) case _ => mapOver(tp) } @@ -1476,8 +1482,6 @@ trait Namers extends MethodSynthesis { okParams ++= vps } } - private def errorAddendum = - ": parameter appears in the type of another parameter in the same section or an earlier one" } @deprecated("Use underlyingSymbol instead", "2.10.0") @@ -1506,7 +1510,7 @@ trait Namers extends MethodSynthesis { } catch { case e: InvalidCompanions => - ctx.error(original.pos, e.getMessage) + ctx.unit.error(original.pos, e.getMessage) NoSymbol } } diff --git a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala index 79cb211215..3a3c244d1c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala +++ b/src/compiler/scala/tools/nsc/typechecker/NamesDefaults.scala @@ -17,6 +17,7 @@ trait NamesDefaults { self: Analyzer => import global._ import definitions._ + import NamesDefaultsErrorsGen._ val defaultParametersOfMethod = perRunCaches.newWeakMap[Symbol, Set[Symbol]]() withDefaultValue Set() @@ -312,8 +313,7 @@ trait NamesDefaults { self: Analyzer => // type the application without names; put the arguments in definition-site order val typedApp = doTypedApply(tree, funOnly, reorderArgs(namelessArgs, argPos), mode, pt) - - if (typedApp.tpe.isError) setError(tree) + if (typedApp.isErrorTyped) tree else typedApp match { // Extract the typed arguments, restore the call-site evaluation order (using // ValDef's in the block), change the arguments to these local values. 
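
In the NamesDefaults hunk just above, the check moves from typedApp.tpe.isError to typedApp.isErrorTyped and the original tree is returned instead of calling setError again: with buffered errors the failure has already been recorded, so the transformation only needs to back off. A minimal standalone sketch of that short-circuit, using invented Tree and Tpe stand-ins rather than the compiler's types:

    // Toy model of backing off once a subtree is already marked erroneous.
    object ErrorTypedSketch {
      sealed trait Tpe
      case object IntTpe   extends Tpe
      case object ErrorTpe extends Tpe

      // "error-typed" covers both an error type and a tree whose typing only
      // produced buffered errors, which is what the patch checks for.
      final case class Tree(show: String, tpe: Tpe, hasBufferedError: Boolean = false) {
        def isErrorTyped: Boolean = tpe == ErrorTpe || hasBufferedError
      }

      // Rewrite a typed application into block form, but return the original tree
      // untouched if typing failed: the error is already buffered elsewhere,
      // so no second diagnostic is produced here.
      def rewrite(original: Tree, typedApp: Tree): Tree =
        if (typedApp.isErrorTyped) original
        else typedApp.copy(show = "{ val x$1 = 1; " + typedApp.show + " }")

      def main(args: Array[String]): Unit = {
        println(rewrite(Tree("f(b = 2)", IntTpe), Tree("f(1, 2)", IntTpe)).show)
        println(rewrite(Tree("f(b = 2)", IntTpe), Tree("f(1, 2)", IntTpe, hasBufferedError = true)).show)
      }
    }
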
@@ -384,6 +384,7 @@ trait NamesDefaults { self: Analyzer => if (missing forall (_.hasDefaultFlag)) { val defaultArgs = missing flatMap (p => { val defGetter = defaultGetter(p, context) + // TODO #3649 can create spurious errors when companion object is gone (because it becomes unlinked from scope) if (defGetter == NoSymbol) None // prevent crash in erroneous trees, #3649 else { var default1 = qual match { @@ -434,12 +435,12 @@ trait NamesDefaults { self: Analyzer => private def savingUndeterminedTParams[T](context: Context)(fn: List[Symbol] => T): T = { val savedParams = context.extractUndetparams() - val savedReporting = context.reportAmbiguousErrors + val savedReporting = context.ambiguousErrors - context.reportAmbiguousErrors = false + context.setAmbiguousErrors(false) try fn(savedParams) finally { - context.reportAmbiguousErrors = savedReporting + context.setAmbiguousErrors(savedReporting) //@M note that we don't get here when an ambiguity was detected (during the computation of res), // as errorTree throws an exception context.undetparams = savedParams @@ -488,7 +489,7 @@ trait NamesDefaults { self: Analyzer => // is called, and EmptyTree can only be typed NoType. Thus we need to // disable conforms as a view... try typer.silent(_.typed(arg, subst(paramtpe))) match { - case t: Tree => !t.isErroneous + case SilentResultValue(t) => !t.isErroneous // #4041 case _ => false } catch { @@ -496,9 +497,7 @@ trait NamesDefaults { self: Analyzer => // CyclicReferences. Fix for #3685 case cr @ CyclicReference(sym, _) => (sym.name == param.name) && sym.accessedOrSelf.isVariable && { - context.error(sym.pos, - "variable definition needs type because '%s' is used as a named argument in its body.".format(sym.name)) - typer.infer.setError(arg) + NameClashError(sym, arg)(typer.context) true } } @@ -514,18 +513,17 @@ trait NamesDefaults { self: Analyzer => * after named ones. */ def removeNames(typer: Typer)(args: List[Tree], params: List[Symbol]): (List[Tree], Array[Int]) = { - import typer.context + implicit val context0 = typer.context // maps indices from (order written by user) to (order of definition) val argPos = Array.fill(args.length)(-1) var positionalAllowed = true val namelessArgs = mapWithIndex(args) { (arg, index) => - def fail(msg: String) = typer.infer.errorTree(arg, msg) arg match { case arg @ AssignOrNamedArg(Ident(name), rhs) => def matchesName(param: Symbol) = !param.isSynthetic && ( (param.name == name) || (param.deprecatedParamName match { case Some(`name`) => - context.unit.deprecationWarning(arg.pos, + context0.unit.deprecationWarning(arg.pos, "the parameter name "+ name +" has been deprecated. Use "+ param.name +" instead.") true case _ => false @@ -539,12 +537,12 @@ trait NamesDefaults { self: Analyzer => // treat the arg as an assignment of type Unit Assign(arg.lhs, rhs) setPos arg.pos } - else fail("unknown parameter name: " + name) + else UnknownParameterNameNamesDefaultError(arg, name) } else if (argPos contains pos) - fail("parameter specified twice: " + name) + DoubleParamNamesDefaultError(arg, name) else if (isAmbiguousAssignment(typer, params(pos), arg)) - fail("reference to " + name + " is ambiguous; it is both a method parameter and a variable in scope.") + AmbiguousReferenceInNamesDefaultError(arg, name) else { // if the named argument is on the original parameter // position, positional after named is allowed. 
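
savingUndeterminedTParams above now goes through setAmbiguousErrors(...) instead of assigning the old reportAmbiguousErrors var directly, keeping the usual save/disable/restore-in-finally shape. A small self-contained sketch of that pattern; Ctx and its methods are simplified stand-ins, not the compiler's Context:

    // Temporarily disable one class of diagnostics and restore the previous
    // setting even if the body throws.
    object AmbiguousTogglingSketch {
      final class Ctx {
        private var ambiguous = true
        def ambiguousErrors: Boolean = ambiguous
        def setAmbiguousErrors(b: Boolean): Unit = ambiguous = b
        def report(msg: String): Unit = if (ambiguous) println("ambiguity: " + msg)
      }

      def silencingAmbiguities[T](ctx: Ctx)(body: => T): T = {
        val saved = ctx.ambiguousErrors
        ctx.setAmbiguousErrors(false)
        try body
        finally ctx.setAmbiguousErrors(saved)
      }

      def main(args: Array[String]): Unit = {
        val ctx = new Ctx
        silencingAmbiguities(ctx) { ctx.report("both f and g match") } // suppressed
        ctx.report("both f and g match")                               // reported
      }
    }
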
@@ -556,7 +554,7 @@ trait NamesDefaults { self: Analyzer => case _ => argPos(index) = index if (positionalAllowed) arg - else fail("positional after named argument.") + else PositionalAfterNamedNamesDefaultError(arg) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index ed185c27d6..dc5e248631 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -315,7 +315,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => val extractorCall = try { context.undetparams = Nil silent(_.typed(Apply(Select(orig, extractor), List(Ident(nme.SELECTOR_DUMMY) setType fun.tpe.finalResultType)), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - case extractorCall: Tree => extractorCall // if !extractorCall.containsError() + case SilentResultValue(extractorCall) => extractorCall // if !extractorCall.containsError() case _ => // this fails to resolve overloading properly... // Apply(typedOperator(Select(orig, extractor)), List(Ident(nme.SELECTOR_DUMMY))) // no need to set the type of the dummy arg, it will be replaced anyway diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 112aa47114..0405163ef3 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1240,11 +1240,11 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R } /* Check whether argument types conform to bounds of type parameters */ - private def checkBounds(pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type], pos: Position): Unit = - try typer.infer.checkBounds(pos, pre, owner, tparams, argtps, "") + private def checkBounds(tree0: Tree, pre: Type, owner: Symbol, tparams: List[Symbol], argtps: List[Type]): Unit = + try typer.infer.checkBounds(tree0, pre, owner, tparams, argtps, "") catch { case ex: TypeError => - unit.error(pos, ex.getMessage()); + unit.error(tree0.pos, ex.getMessage()) if (settings.explaintypes.value) { val bounds = tparams map (tp => tp.info.instantiateTypeParams(tparams, argtps).bounds) (argtps, bounds).zipped map ((targ, bound) => explainTypes(bound.lo, targ)) @@ -1374,22 +1374,22 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R false } - private def checkTypeRef(tp: Type, pos: Position) = tp match { + private def checkTypeRef(tp: Type, tree: Tree) = tp match { case TypeRef(pre, sym, args) => - checkDeprecated(sym, pos) + checkDeprecated(sym, tree.pos) if(sym.isJavaDefined) sym.typeParams foreach (_.cookJavaRawInfo()) if (!tp.isHigherKinded) - checkBounds(pre, sym.owner, sym.typeParams, args, pos) + checkBounds(tree, pre, sym.owner, sym.typeParams, args) case _ => } - private def checkAnnotations(tpes: List[Type], pos: Position) = tpes foreach (tp => checkTypeRef(tp, pos)) + private def checkAnnotations(tpes: List[Type], tree: Tree) = tpes foreach (tp => checkTypeRef(tp, tree)) private def doTypeTraversal(tree: Tree)(f: Type => Unit) = if (!inPattern) tree.tpe foreach f private def applyRefchecksToAnnotations(tree: Tree): Unit = { def applyChecks(annots: List[AnnotationInfo]) = { - checkAnnotations(annots map (_.atp), tree.pos) + checkAnnotations(annots map (_.atp), tree) transformTrees(annots flatMap (_.args)) } @@ -1404,7 +1404,8 @@ abstract class RefChecks extends 
InfoTransform with reflect.internal.transform.R case tpt@TypeTree() => if(tpt.original != null) { tpt.original foreach { - case dc@TypeTreeWithDeferredRefCheck() => applyRefchecksToAnnotations(dc.check()) // #2416 + case dc@TypeTreeWithDeferredRefCheck() => + applyRefchecksToAnnotations(dc.check()) // #2416 case _ => } } @@ -1450,7 +1451,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R unit.error(tree.pos, "too many dimensions for array creation") Literal(Constant(null)) } else { - localTyper.getManifestTree(tree.pos, etpe, false) + localTyper.getManifestTree(tree, etpe, false) } } val newResult = localTyper.typedPos(tree.pos) { @@ -1578,13 +1579,13 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R case ExistentialType(tparams, tpe) => existentialParams ++= tparams case t: TypeRef => - checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree.pos) + checkTypeRef(deriveTypeWithWildcards(existentialParams.toList)(t), tree) case _ => } tree case TypeApply(fn, args) => - checkBounds(NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe), tree.pos) + checkBounds(tree, NoPrefix, NoSymbol, fn.tpe.typeParams, args map (_.tpe)) transformCaseApply(tree, ()) case x @ Apply(_, _) => @@ -1641,7 +1642,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R result } catch { case ex: TypeError => - if (settings.debug.value) ex.printStackTrace(); + if (settings.debug.value) ex.printStackTrace() unit.error(tree.pos, ex.getMessage()) tree } finally { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index 4e4fbe35cb..b109d57554 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -51,12 +51,21 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT } private def checkPackedConforms(tree: Tree, pt: Type): Tree = { + def typeError(typer: analyzer.Typer, pos: Position, found: Type, req: Type) { + if (!found.isErroneous && !req.isErroneous) { + val msg = analyzer.ErrorUtils.typeErrorMsg(found, req, typer.infer.isPossiblyMissingArgs(found, req)) + typer.context.error(pos, analyzer.withAddendum(pos)(msg)) + if (settings.explaintypes.value) + explainTypes(found, req) + } + } + if (tree.tpe exists (_.typeSymbol.isExistentialSkolem)) { val packed = localTyper.packedType(tree, NoSymbol) if (!(packed <:< pt)) { val errorContext = localTyper.context.make(localTyper.context.tree) - errorContext.reportGeneralErrors = true - analyzer.newTyper(errorContext).infer.typeError(tree.pos, packed, pt) + errorContext.setReportErrors() + typeError(analyzer.newTyper(errorContext), tree.pos, packed, pt) } } tree diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index fe3ceafa2d..8c434a8838 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -93,37 +93,6 @@ trait TypeDiagnostics { } } - def notAMemberMessage(pos: Position, qual: Tree, name: Name) = { - val owner = qual.tpe.typeSymbol - val target = qual.tpe.widen - def targetKindString = if (owner.isTypeParameterOrSkolem) "type parameter " else "" - def nameString = decodeWithKind(name, owner) - /** Illuminating some common situations and errors a bit further. 
*/ - def addendum = { - val companion = { - if (name.isTermName && owner.isPackageClass) { - target.member(name.toTypeName) match { - case NoSymbol => "" - case sym => "\nNote: %s exists, but it has no companion object.".format(sym) - } - } - else "" - } - val semicolon = ( - if (posPrecedes(qual.pos, pos)) - "\npossible cause: maybe a semicolon is missing before `"+nameString+"'?" - else - "" - ) - companion + semicolon - } - - withAddendum(qual.pos)( - if (name == nme.CONSTRUCTOR) target + " does not have a constructor" - else nameString + " is not a member of " + targetKindString + target + addendum - ) - } - /** An explanatory note to be added to error messages * when there's a problem with abstract var defs */ def abstractVarMessage(sym: Symbol): String = @@ -131,9 +100,6 @@ trait TypeDiagnostics { "\n(Note that variables need to be initialized to be defined)" else "" - def notAMemberError(pos: Position, qual: Tree, name: Name) = - context.error(pos, notAMemberMessage(pos, qual, name)) - /** Only prints the parameter names if they're not synthetic, * since "x$1: Int" does not offer any more information than "Int". */ @@ -154,21 +120,6 @@ trait TypeDiagnostics { def alternativesString(tree: Tree) = alternatives(tree) map (x => " " + methodTypeErrorString(x)) mkString ("", " \n", "\n") - def missingParameterTypeMsg(fun: Tree, vparam: ValDef, pt: Type) = { - def anonMessage = ( - "\nThe argument types of an anonymous function must be fully known. (SLS 8.5)" + - "\nExpected type was: " + pt.toLongString - ) - val suffix = - if (!vparam.mods.isSynthetic) "" - else " for expanded function" + (fun match { - case Function(_, Match(_, _)) => anonMessage - case _ => " " + fun - }) - - "missing parameter type" + suffix - } - /** The symbol which the given accessor represents (possibly in part). 
* This is used for error messages, where we want to speak in terms * of the actual declaration or definition, not in terms of the generated setters @@ -202,34 +153,6 @@ trait TypeDiagnostics { else defaultMessage } - def notEnoughArgumentsMsg(fun: Tree, missing: List[Symbol]): String = { - val suffix = { - if (missing.isEmpty) "" - else { - val keep = missing take 3 map (_.name) - ".\nUnspecified value parameter%s %s".format( - if (missing.tail.isEmpty) "" else "s", - if (missing drop 3 nonEmpty) (keep :+ "...").mkString(", ") - else keep.mkString("", ", ", ".") - ) - } - } - - "not enough arguments for " + treeSymTypeMsg(fun) + suffix - } - - def applyErrorMsg(tree: Tree, msg: String, argtpes: List[Type], pt: Type) = { - def asParams(xs: List[Any]) = xs.mkString("(", ", ", ")") - - def resType = if (pt isWildcard) "" else " with expected result type " + pt - def allTypes = (alternatives(tree) flatMap (_.paramTypes)) ++ argtpes :+ pt - def locals = alternatives(tree) flatMap (_.typeParams) - - withDisambiguation(locals, allTypes: _*) { - treeSymTypeMsg(tree) + msg + asParams(argtpes) + resType - } - } - def disambiguate(ss: List[String]) = ss match { case Nil => Nil case s :: ss => s :: (ss map { case `s` => "(some other)"+s ; case x => x }) @@ -446,8 +369,8 @@ trait TypeDiagnostics { trait TyperDiagnostics { self: Typer => - private def contextError(pos: Position, msg: String) = context.error(pos, msg) - private def contextError(pos: Position, err: Throwable) = context.error(pos, err) + private def contextError(context0: Analyzer#Context, pos: Position, msg: String) = context0.error(pos, msg) + private def contextError(context0: Analyzer#Context, pos: Position, err: Throwable) = context0.error(pos, err) private def contextWarning(pos: Position, msg: String) = context.unit.warning(pos, msg) def permanentlyHiddenWarning(pos: Position, hidden: Name, defn: Symbol) = @@ -466,14 +389,8 @@ trait TypeDiagnostics { // Error suppression will squash some of these warnings unless we circumvent it. // It is presumed if you are using a -Y option you would really like to hear // the warnings you've requested. - if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK) { - val saved = context.reportGeneralErrors - try { - context.reportGeneralErrors = true - context.warning(tree.pos, "dead code following this construct") - } - finally context.reportGeneralErrors = saved - } + if (settings.warnDeadCode.value && context.unit.exists && treeOK(tree) && exprOK) + context.warning(tree.pos, "dead code following this construct", true) tree } @@ -485,8 +402,8 @@ trait TypeDiagnostics { } } - def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded - def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive" + private def symWasOverloaded(sym: Symbol) = sym.owner.isClass && sym.owner.info.member(sym.name).isOverloaded + private def cyclicAdjective(sym: Symbol) = if (symWasOverloaded(sym)) "overloaded" else "recursive" /** Returns Some(msg) if the given tree is untyped apparently due * to a cyclic reference, and None otherwise. @@ -500,15 +417,18 @@ trait TypeDiagnostics { "\nIf applicable, you may wish to try moving some members into another object." ) } - + /** Report a type error. 
* * @param pos0 The position where to report the error * @param ex The exception that caused the error */ - def reportTypeError(pos: Position, ex: TypeError) { + def reportTypeError(context0: Context, pos: Position, ex: TypeError) { if (ex.pos == NoPosition) ex.pos = pos - if (!context.reportGeneralErrors) throw ex + // TODO: should be replaced by throwErrors + // but it seems that throwErrors excludes some of the errors that should actually be + // buffered, causing TypeErrors to fly around again. This needs some more investigation. + if (!context0.reportErrors) throw ex if (settings.debug.value) ex.printStackTrace() ex match { @@ -517,12 +437,12 @@ trait TypeDiagnostics { case Import(expr, _) => expr.pos case _ => ex.pos } - contextError(pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) + contextError(context0, pos, cyclicReferenceMessage(sym, info.tree) getOrElse ex.getMessage()) if (sym == ObjectClass) throw new FatalError("cannot redefine root "+sym) case _ => - contextError(ex.pos, ex) + contextError(context0, ex.pos, ex) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 216ad6cd4c..da7e54af5c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -12,14 +12,12 @@ package scala.tools.nsc package typechecker -import scala.collection.{ mutable, immutable } +import scala.collection.mutable import scala.tools.nsc.util.BatchSourceFile import mutable.ListBuffer import symtab.Flags._ import util.Statistics import util.Statistics._ -import scala.tools.util.StringOps.{ countAsString, countElementsAsString } -import scala.tools.util.EditDistance.similarString // Suggestion check whether we can do without priming scopes with symbols of outer scopes, // like the IDE does. 
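
reportTypeError in the TypeDiagnostics hunk above now takes the reporting context explicitly and rethrows the TypeError when that context does not report errors, so a silent caller can recover, instead of consulting the typer's own flag. The sketch below shows only that decision; the Ctx and TypeError classes are toy stand-ins for the real ones:

    // Either rethrow a type error (silent/buffering caller) or report it,
    // depending on the mode of the context passed in explicitly.
    object ReportTypeErrorSketch {
      final class TypeError(val msg: String) extends Exception(msg)

      final class Ctx(val reportErrors: Boolean) {
        def error(msg: String): Unit = println("error: " + msg)
      }

      def reportTypeError(ctx: Ctx, ex: TypeError): Unit =
        if (!ctx.reportErrors) throw ex   // let the silent caller decide what to do
        else ctx.error(ex.msg)            // normal mode: surface it immediately

      def main(args: Array[String]): Unit = {
        reportTypeError(new Ctx(reportErrors = true), new TypeError("type mismatch"))
        try reportTypeError(new Ctx(reportErrors = false), new TypeError("type mismatch"))
        catch { case ex: TypeError => println("recovered from: " + ex.msg) }
      }
    }
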
@@ -60,7 +58,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { super.traverse(tree) } } -/* needed for experimental version where eraly types can be type arguments +/* needed for experimental version where early types can be type arguments class EarlyMap(clazz: Symbol) extends TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(NoPrefix, sym, List()) if (sym hasFlag PRESUPER) => @@ -71,6 +69,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } */ + sealed abstract class SilentResult[+T] + case class SilentTypeError(err: AbsTypeError) extends SilentResult[Nothing] { } + case class SilentResultValue[+T](value: T) extends SilentResult[T] { } + def newTyper(context: Context): Typer = new NormalTyper(context) private class NormalTyper(context : Context) extends Typer(context) @@ -80,9 +82,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { private def isPastTyper = phase.id > currentRun.typerPhase.id - abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation { + abstract class Typer(context0: Context) extends TyperDiagnostics with Adaptation with TyperContextErrors { import context0.unit import typeDebug.{ ptTree, ptBlock, ptLine } + import TyperErrorGen._ val infer = new Inferencer(context0) { override def isCoercible(tp: Type, pt: Type): Boolean = undoLog undo { // #3281 @@ -102,15 +105,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree))) var mkArg: (Tree, Name) => Tree = mkPositionalArg - def errorMessage(paramName: Name, paramTp: Type) = - paramTp.typeSymbol match { - case ImplicitNotFoundMsg(msg) => msg.format(paramName, paramTp) - case _ => - "could not find implicit value for "+ - (if (paramName startsWith nme.EVIDENCE_PARAM_PREFIX) "evidence parameter of type " - else "parameter "+paramName+": ")+paramTp - } - // DEPMETTODO: instantiate type vars that depend on earlier implicit args (see adapt (4.1)) // // apply the substitutions (undet type param -> type) that were determined @@ -127,8 +121,19 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { argBuff += mkArg(res.tree, param.name) } else { mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args - if (!param.hasDefault) - context.error(fun.pos, errorMessage(param.name, param.tpe)) + if (!param.hasDefault) { + context.errBuffer.find(_.kind == ErrorKinds.Divergent) match { + case Some(divergentImplicit) => + // DivergentImplicit error has higher priority than "no implicit found" + // no need to issue the problem again if we are still in silent mode + if (context.reportErrors) { + context.issue(divergentImplicit) + context.condBufferFlush(_.kind == ErrorKinds.Divergent) + } + case None => + NoImplicitFoundError(fun, param) + } + } /* else { TODO: alternative (to expose implicit search failure more) --> resolve argument, do type inference, keep emitting positional args, infer type params based on default value for arg @@ -149,6 +154,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case ErrorType => fun } + + def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = + inferView(tree, from, to, reportAmbiguous, true) /** Infer an implicit conversion (``view'') between two types. * @param tree The tree which needs to be converted. 
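
SilentResult, introduced above, gives Typer.silent a structured result that callers pattern-match on (see the SilentResultValue cases in the NamesDefaults and PatMatVirtualiser hunks) instead of getting back either a tree or a raw failure. The following standalone sketch mirrors that shape; silently and TypeErrorInfo are invented stand-ins for the real silent machinery:

    // Either-like result for "silent" typing attempts: success carries a value,
    // failure carries a structured error rather than a thrown exception.
    object SilentResultSketch {
      final case class TypeErrorInfo(msg: String)

      sealed abstract class SilentResult[+T]
      final case class SilentTypeError(err: TypeErrorInfo) extends SilentResult[Nothing]
      final case class SilentResultValue[+T](value: T) extends SilentResult[T]

      // Stand-in for Typer.silent: run the computation and turn a failure into a
      // value the caller can inspect.
      def silently[T](op: => T): SilentResult[T] =
        try SilentResultValue(op)
        catch { case ex: IllegalArgumentException => SilentTypeError(TypeErrorInfo(ex.getMessage)) }

      def main(args: Array[String]): Unit = {
        val results: List[SilentResult[Int]] =
          List(silently("42".toInt), silently(throw new IllegalArgumentException("type mismatch")))
        results foreach {
          case SilentResultValue(v) => println("typed to: " + v)
          case SilentTypeError(err) => println("kept for the buffer: " + err.msg)
        }
      }
    }
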
@@ -157,8 +165,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * @param reportAmbiguous Should ambiguous implicit errors be reported? * False iff we search for a view to find out * whether one type is coercible to another. + * @param saveErrors Should ambiguous and divergent implicit errors that were buffered + * during the inference of a view be put into the original buffer. + * False iff we don't care about them. */ - def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean): Tree = { + def inferView(tree: Tree, from: Type, to: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { debuglog("infer view from "+from+" to "+to)//debug if (isPastTyper) EmptyTree else from match { @@ -167,7 +178,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case PolyType(_, _) => EmptyTree case _ => def wrapImplicit(from: Type): Tree = { - val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context) + val result = inferImplicit(tree, functionType(List(from), to), reportAmbiguous, true, context, saveErrors) if (result.subst != EmptyTreeTypeSubstituter) result.subst traverse tree result.tree } @@ -205,22 +216,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * @return ... */ def checkStable(tree: Tree): Tree = - if (treeInfo.isExprSafeToInline(tree)) tree - else errorTree( - tree, - "stable identifier required, but "+tree+" found."+ - (if (isStableExceptVolatile(tree)) { - val tpe = tree.symbol.tpe match { - case PolyType(_, rtpe) => rtpe - case t => t - } - "\n Note that "+tree.symbol+" is not stable because its type, "+tree.tpe+", is volatile." - } else "")) + if (treeInfo.isExprSafeToInline(tree)) tree else UnstableTreeError(tree) /** Would tree be a stable (i.e. a pure expression) if the type * of its symbol was not volatile? */ - private def isStableExceptVolatile(tree: Tree) = { + protected def isStableExceptVolatile(tree: Tree) = { tree.hasSymbol && tree.symbol != NoSymbol && tree.tpe.isVolatile && { val savedTpe = tree.symbol.info val savedSTABLE = tree.symbol getFlag STABLE @@ -234,21 +235,24 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } /** Check that `tpt` refers to a non-refinement class type */ - def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean) { - def errorNotClass(found: AnyRef) = error(tpt.pos, "class type required but "+found+" found") - def check(tpe: Type): Unit = tpe.normalize match { + def checkClassType(tpt: Tree, existentialOK: Boolean, stablePrefix: Boolean): Boolean = { + def errorNotClass(found: AnyRef) = { ClassTypeRequiredError(tpt, found); false } + def check(tpe: Type): Boolean = tpe.normalize match { case TypeRef(pre, sym, _) if sym.isClass && !sym.isRefinementClass => - if (stablePrefix && !isPastTyper) { - if (!pre.isStable) - error(tpt.pos, "type "+pre+" is not a stable prefix") - // A type projection like X#Y can get by the stable check if the - // prefix is singleton-bounded, so peek at the tree too. - else tpt match { + if (stablePrefix && !isPastTyper) + if (!pre.isStable) { + TypeNotAStablePrefixError(tpt, pre) + false + } else + // A type projection like X#Y can get by the stable check if the + // prefix is singleton-bounded, so peek at the tree too. 
+ tpt match { case SelectFromTypeTree(qual, _) if !isSingleType(qual.tpe) => errorNotClass(tpt) - case _ => ; - } - } - case ErrorType => ; + case _ => true + } + else + true + case ErrorType => true case PolyType(_, restpe) => check(restpe) case ExistentialType(_, restpe) if existentialOK => check(restpe) case AnnotatedType(_, underlying, _) => check(underlying) @@ -264,17 +268,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * @return true if tp is not a subtype of itself. */ def checkNonCyclic(pos: Position, tp: Type): Boolean = { - def checkNotLocked(sym: Symbol): Boolean = { + def checkNotLocked(sym: Symbol) = { sym.initialize - sym.lockOK || {error(pos, "cyclic aliasing or subtyping involving "+sym); false} + sym.lockOK || { CyclicAliasingOrSubtypingError(pos, sym); false } } tp match { case TypeRef(pre, sym, args) => - (checkNotLocked(sym)) && ( - !sym.isNonClassType || - checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym) // @M! info for a type ref to a type parameter now returns a polytype - // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym) - ) + checkNotLocked(sym) && + ((!sym.isNonClassType) || checkNonCyclic(pos, appliedType(pre.memberInfo(sym), args), sym)) + // @M! info for a type ref to a type parameter now returns a polytype + // @M was: checkNonCyclic(pos, pre.memberInfo(sym).subst(sym.typeParams, args), sym) + case SingleType(pre, sym) => checkNotLocked(sym) /* @@ -293,10 +297,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } def checkNonCyclic(pos: Position, tp: Type, lockedSym: Symbol): Boolean = try { - lockedSym.lock { - throw new TypeError("illegal cyclic reference involving " + lockedSym) - } - checkNonCyclic(pos, tp) + if (!lockedSym.lock(CyclicReferenceError(pos, lockedSym))) false + else checkNonCyclic(pos, tp) } finally { lockedSym.unlock() } @@ -312,26 +314,24 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } - def checkParamsConvertible(pos: Position, tpe: Type) { - tpe match { - case MethodType(formals, restpe) => - /* - if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1) - error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters") - if (formals exists (isRepeatedParamType(_))) - error(pos, "methods with `*`-parameters cannot be converted to function values"); - */ - if (restpe.isDependent) - error(pos, "method with dependent type "+tpe+" cannot be converted to function value") - checkParamsConvertible(pos, restpe) - case _ => - } + def checkParamsConvertible(tree: Tree, tpe0: Type) { + def checkParamsConvertible0(tpe: Type) = + tpe match { + case MethodType(formals, restpe) => + /* + if (formals.exists(_.typeSymbol == ByNameParamClass) && formals.length != 1) + error(pos, "methods with `=>`-parameter can be converted to function values only if they take no other parameters") + if (formals exists (isRepeatedParamType(_))) + error(pos, "methods with `*`-parameters cannot be converted to function values"); + */ + if (restpe.isDependent) + DependentMethodTpeConversionToFunctionError(tree, tpe) + checkParamsConvertible(tree, restpe) + case _ => + } + checkParamsConvertible0(tpe0) } - def checkStarPatOK(pos: Position, mode: Int) = - if ((mode & STARmode) == 0 && !isPastTyper) - error(pos, "star patterns must correspond with varargs parameters") - /** Check that type of given tree does not contain local or private * components. 
*/ @@ -362,13 +362,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def locals[T <: Tree](scope: Scope, pt: Type, tree: T): T = check(NoSymbol, scope, pt, tree) - def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { + private def check[T <: Tree](owner: Symbol, scope: Scope, pt: Type, tree: T): T = { this.owner = owner this.scope = scope hiddenSymbols = List() val tp1 = apply(tree.tpe) if (hiddenSymbols.isEmpty) tree setType tp1 - else if (hiddenSymbols exists (_.isErroneous)) setError(tree) + else if (hiddenSymbols exists (_.isErroneous)) HiddenSymbolWithError(tree) else if (isFullyDefined(pt)) tree setType pt else if (tp1.typeSymbol.isAnonymousClass) check(owner, scope, pt, tree setType tp1.typeSymbol.classBound) @@ -376,10 +376,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { tree setType packSymbols(hiddenSymbols.reverse, tp1) else if (!phase.erasedTypes) { // privates val badSymbol = hiddenSymbols.head - error(tree.pos, - (if (badSymbol.isPrivate) "private " else "") + badSymbol + - " escapes its defining scope as part of type "+tree.tpe) - setError(tree) + SymbolEscapesScopeError(tree, badSymbol) } else tree } @@ -441,28 +438,66 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { /** The qualifying class * of a this or super with prefix qual. + * packageOk is equal false when qualifying class symbol */ - def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean): Symbol = + def qualifyingClass(tree: Tree, qual: Name, packageOK: Boolean = false): Option[Symbol] = context.enclClass.owner.ownerChain.find(o => qual.isEmpty || o.isClass && o.name == qual) match { case Some(c) if packageOK || !c.isPackageClass => - c + Some(c) case _ => - error( - tree.pos, - if (qual.isEmpty) tree+" can be used only in a class, object, or template" - else qual+" is not an enclosing class") - NoSymbol + QualifyingClassError(tree, qual) + None } /** The typer for an expression, depending on where we are. If we are before a superclass * call, this is a typer over a constructor context; otherwise it is the current typer. */ - def constrTyperIf(inConstr: Boolean): Typer = + @inline + final def constrTyperIf(inConstr: Boolean): Typer = if (inConstr) { assert(context.undetparams.isEmpty) newTyper(context.makeConstructorContext) } else this + @inline + final def withCondConstrTyper[T](inConstr: Boolean)(f: Typer => T): T = + if (inConstr) { + assert(context.undetparams.isEmpty) + val c = context.makeConstructorContext + typerWithLocalContext(c)(f) + } else { + f(this) + } + + @inline + final def typerWithCondLocalContext[T](c: => Context)(cond: Boolean)(f: Typer => T): T = + if (cond) typerWithLocalContext(c)(f) else f(this) + + @inline + final def typerWithLocalContext[T](c: Context)(f: Typer => T): T = { + val res = f(newTyper(c)) + if (c.hasErrors) + context.updateBuffer(c.flushAndReturnBuffer()) + res + } + + @inline + final def typerReportAnyContextErrors[T](c: Context)(f: Typer => T): T = { + val res = f(newTyper(c)) + if (c.hasErrors) + context.issue(c.errBuffer.head) + res + } + + @inline + final def withSavedContext[T](c: Context)(f: => T) = { + val savedErrors = c.flushAndReturnBuffer() + val res = f + c.updateBuffer(savedErrors) + res + } + + /** The typer for a label definition. If this is part of a template we * first have to enter the label definition. 
*/ @@ -573,9 +608,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { inferExprAlternative(tree, pt) val sym = tree.symbol - def fail() = errorTree(tree, sym.kindString + " " + sym.fullName + " is not a value") + def fail() = NotAValueError(tree, sym) - if (tree.tpe.isError) tree + if (tree.isErrorTyped) tree else if ((mode & (PATTERNmode | FUNmode)) == PATTERNmode && tree.isTerm) { // (1) if (sym.isValue) checkStable(tree) else fail() @@ -629,15 +664,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } def silent[T](op: Typer => T, - reportAmbiguousErrors: Boolean = context.reportAmbiguousErrors, - newtree: Tree = context.tree): Any /* in fact, TypeError or T */ = { + reportAmbiguousErrors: Boolean = context.ambiguousErrors, + newtree: Tree = context.tree): SilentResult[T] = { val rawTypeStart = startCounter(rawTypeFailed) val findMemberStart = startCounter(findMemberFailed) val subtypeStart = startCounter(subtypeFailed) val failedSilentStart = startTimer(failedSilentNanos) try { - if (context.reportGeneralErrors || - reportAmbiguousErrors != context.reportAmbiguousErrors || + if (context.reportErrors || + reportAmbiguousErrors != context.ambiguousErrors || newtree != context.tree) { val context1 = context.makeSilent(reportAmbiguousErrors, newtree) context1.undetparams = context.undetparams @@ -648,38 +683,29 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { context.undetparams = context1.undetparams context.savedTypeBounds = context1.savedTypeBounds context.namedApplyBlockInfo = context1.namedApplyBlockInfo - result + if (context1.hasErrors) SilentTypeError(context1.errBuffer.head) + else SilentResultValue(result) } else { - op(this) + assert(context.bufferErrors || isPastTyper, "silent mode is not available past typer") + withSavedContext(context){ + val res = op(this) + val errorsToReport = context.flushAndReturnBuffer() + if (errorsToReport.isEmpty) SilentResultValue(res) else SilentTypeError(errorsToReport.head) + } } } catch { case ex: CyclicReference => throw ex case ex: TypeError => + // fallback in case TypeError is still thrown + // @H this happens for example in cps annotation checker stopCounter(rawTypeFailed, rawTypeStart) stopCounter(findMemberFailed, findMemberStart) stopCounter(subtypeFailed, subtypeStart) stopTimer(failedSilentNanos, failedSilentStart) - ex + SilentTypeError(TypeErrorWrapper(ex)) } } - /** Utility method: Try op1 on tree. If that gives an error try op2 instead. - */ - def tryBoth(tree: Tree)(op1: (Typer, Tree) => Tree)(op2: (Typer, Tree) => Tree): Tree = - silent(op1(_, tree)) match { - case result1: Tree => - result1 - case ex1: TypeError => - silent(op2(_, resetAllAttrs(tree))) match { - case result2: Tree => -// println("snd succeeded: "+result2) - result2 - case ex2: TypeError => - reportTypeError(tree.pos, ex1) - setError(tree) - } - } - def isCodeType(tpe: Type) = tpe.typeSymbol isNonBottomSubClass CodeClass /** Perform the following adaptations of expression, pattern or type `tree` wrt to @@ -724,8 +750,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (context.undetparams nonEmpty) { // (9) -- should revisit dropped condition `(mode & POLYmode) == 0` // dropped so that type args of implicit method are inferred even if polymorphic expressions are allowed // needed for implicits in 2.8 collection library -- maybe once #3346 is fixed, we can reinstate the condition? 
- context.undetparams = - inferExprInstance(tree, context.extractUndetparams(), pt, + context.undetparams = inferExprInstance(tree, context.extractUndetparams(), pt, // approximate types that depend on arguments since dependency on implicit argument is like dependency on type parameter mt.approximate, // if we are looking for a manifest, instantiate type to Nothing anyway, @@ -736,18 +761,24 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { useWeaklyCompatible = true) // #3808 } - val typer1 = constrTyperIf(treeInfo.isSelfOrSuperConstrCall(tree)) - if (original != EmptyTree && pt != WildcardType) - typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match { - case result: Tree => result - case ex: TypeError => - debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original)) - val tree1 = typed(resetAllAttrs(original), mode, WildcardType) - tree1.tpe = addAnnotations(tree1, tree1.tpe) - if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree) - } - else - typer1.typed(typer1.applyImplicitArgs(tree), mode, pt) + // avoid throwing spurious DivergentImplicit errors + if (context.hasErrors) + return setError(tree) + + withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 => + if (original != EmptyTree && pt != WildcardType) + typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match { + case SilentResultValue(result) => + result + case _ => + debuglog("fallback on implicits: " + tree + "/" + resetAllAttrs(original)) + val tree1 = typed(resetAllAttrs(original), mode, WildcardType) + tree1.tpe = addAnnotations(tree1, tree1.tpe) + if (tree1.isEmpty) tree1 else adapt(tree1, mode, pt, EmptyTree) + } + else + typer1.typed(typer1.applyImplicitArgs(tree), mode, pt) + } } def instantiateToMethodType(mt: MethodType): Tree = { @@ -758,7 +789,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } if (!meth.isConstructor && !meth.isMacro && isFunctionType(pt)) { // (4.2) debuglog("eta-expanding " + tree + ":" + tree.tpe + " to " + pt) - checkParamsConvertible(tree.pos, tree.tpe) + checkParamsConvertible(tree, tree.tpe) val tree0 = etaExpand(context.unit, tree) // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode)) @@ -775,9 +806,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } else if (!meth.isConstructor && mt.params.isEmpty) { // (4.3) adapt(typed(Apply(tree, List()) setPos tree.pos), mode, pt, original) } else if (context.implicitsEnabled) { - errorTree(tree, "missing arguments for " + meth + meth.locationString + - (if (meth.isConstructor) "" - else ";\nfollow this method with `_' if you want to treat it as a partially applied function")) + MissingArgsForMethodTpeError(tree, meth) } else { setError(tree) } @@ -792,8 +821,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // or raw type (tree.symbol.isJavaDefined && context.unit.isJava), types must be of kind *, // and thus parameterized types must be applied to their type arguments // @M TODO: why do kind-* tree's have symbols, while higher-kinded ones don't? 
- errorTree(tree, tree.symbol + " takes type parameters") - tree setType tree.tpe + MissingTypeParametersError(tree) } else if ( // (7.1) @M: check kind-arity // @M: removed check for tree.hasSymbol and replace tree.symbol by tree.tpe.symbol (TypeTree's must also be checked here, and they don't directly have a symbol) (inHKMode(mode)) && @@ -808,9 +836,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // Note that we treat Any and Nothing as kind-polymorphic. // We can't perform this check when typing type arguments to an overloaded method before the overload is resolved // (or in the case of an error type) -- this is indicated by pt == WildcardType (see case TypeApply in typed1). - errorTree(tree, tree.tpe + " takes " + countElementsAsString(tree.tpe.typeParams.length, "type parameter") + - ", expected: " + countAsString(pt.typeParams.length)) - tree setType tree.tpe + KindArityMismatchError(tree, pt) } else tree match { // (6) case TypeTree() => tree case _ => TypeTree(tree.tpe) setOriginal (tree) setPos (tree.pos) @@ -836,7 +862,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { tree } } else { - errorTree(tree, tree.symbol + " is not a case class constructor, nor does it have an unapply/unapplySeq method") + CaseClassConstructorError(tree) } } @@ -960,7 +986,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (!context.undetparams.isEmpty) { return instantiate(tree, mode, pt) } - if (context.implicitsEnabled && !tree.tpe.isError && !pt.isError) { + if (context.implicitsEnabled && !pt.isError && !tree.isErrorTyped) { // (14); the condition prevents chains of views debuglog("inferring view from " + tree.tpe + " to " + pt) val coercion = inferView(tree, tree.tpe, pt, true) @@ -977,8 +1003,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { unit.echo(tree.pos, msg) debuglog(msg) - return newTyper(context.makeImplicit(context.reportAmbiguousErrors)).typed( + val silentContext = context.makeImplicit(context.ambiguousErrors) + val res = newTyper(silentContext).typed( new ApplyImplicitView(coercion, List(tree)) setPos tree.pos, mode, pt) + if (silentContext.hasErrors) context.issue(silentContext.errBuffer.head) else return res } } } @@ -986,31 +1014,34 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { log("error tree = " + tree) if (settings.explaintypes.value) explainTypes(tree.tpe, pt) } - try { - typeErrorTree(tree, tree.tpe, pt) - } catch { - case ex: TypeError => - if (isPastTyper && pt.existentialSkolems.nonEmpty) { - // Ignore type errors raised in later phases that are due to mismatching types with existential skolems - // We have lift crashing in 2.9 with an adapt failure in the pattern matcher. - // Here's my hypothsis why this happens. The pattern matcher defines a variable of type - // - // val x: T = expr - // - // where T is the type of expr, but T contains existential skolems ts. - // In that case, this value definition does not typecheck. - // The value definition - // - // val x: T forSome { ts } = expr - // - // would typecheck. 
Or one can simply leave out the type of the `val`: - // - // val x = expr - context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree) - adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt)) - } else - throw ex + + val found = tree.tpe + val req = pt + if (!found.isErroneous && !req.isErroneous) { + if (!context.reportErrors && isPastTyper && req.existentialSkolems.nonEmpty) { + // Ignore type errors raised in later phases that are due to mismatching types with existential skolems + // We have lift crashing in 2.9 with an adapt failure in the pattern matcher. + // Here's my hypothsis why this happens. The pattern matcher defines a variable of type + // + // val x: T = expr + // + // where T is the type of expr, but T contains existential skolems ts. + // In that case, this value definition does not typecheck. + // The value definition + // + // val x: T forSome { ts } = expr + // + // would typecheck. Or one can simply leave out the type of the `val`: + // + // val x = expr + context.unit.warning(tree.pos, "recovering from existential Skolem type error in tree \n" + tree + "\nwith type " + tree.tpe + "\n expected type = " + pt + "\n context = " + context.tree) + adapt(tree, mode, deriveTypeWithWildcards(pt.existentialSkolems)(pt)) + } else { + // create an actual error + AdaptTypeError(tree, found, req) + } } + setError(tree) } } } @@ -1027,7 +1058,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def instantiateExpectingUnit(tree: Tree, mode: Int): Tree = { val savedUndetparams = context.undetparams silent(_.instantiate(tree, mode, UnitClass.tpe)) match { - case t: Tree => t + case SilentResultValue(t) => t case _ => context.undetparams = savedUndetparams val valueDiscard = atPos(tree.pos)(Block(List(instantiate(tree, mode, WildcardType)), Literal(Constant()))) @@ -1053,15 +1084,20 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // Note: implicit arguments are still inferred (this kind of "chaining" is allowed) ) } + + def adaptToMember(qual: Tree, searchTemplate: Type): Tree = + adaptToMember(qual, searchTemplate, true, true) + def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree = + adaptToMember(qual, searchTemplate, reportAmbiguous, true) - def adaptToMember(qual: Tree, searchTemplate: Type): Tree = { + def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { if (isAdaptableWithView(qual)) { qual.tpe.widen.normalize match { case et: ExistentialType => qual setType et.skolemizeExistential(context.owner, qual) // open the existential case _ => } - inferView(qual, qual.tpe, searchTemplate, true) match { + inferView(qual, qual.tpe, searchTemplate, reportAmbiguous, saveErrors) match { case EmptyTree => qual case coercion => if (settings.logImplicitConv.value) @@ -1083,13 +1119,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * If no conversion is found, return `qual` unchanged. 
* */ - def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type): Tree = { + def adaptToArguments(qual: Tree, name: Name, args: List[Tree], pt: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { def doAdapt(restpe: Type) = //util.trace("adaptToArgs "+qual+", name = "+name+", argtpes = "+(args map (_.tpe))+", pt = "+pt+" = ") - adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe)) + adaptToMember(qual, HasMethodMatching(name, args map (_.tpe), restpe), reportAmbiguous, saveErrors) if (pt != WildcardType) { silent(_ => doAdapt(pt)) match { - case result: Tree if result != qual => + case SilentResultValue(result) if result != qual => result case _ => debuglog("fallback on implicits in adaptToArguments: "+qual+" . "+name) @@ -1099,30 +1135,32 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { doAdapt(pt) } - /** Try o apply an implicit conversion to `qual` to that it contains - * a method `name`. If that's ambiguous try taking arguments into account using `adaptToArguments`. + /** Try to apply an implicit conversion to `qual` so that it contains + * a method `name`. If that's ambiguous try taking arguments into + * account using `adaptToArguments`. */ - def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int): Tree = { - try { - adaptToMember(qual, HasMember(name)) - } catch { - case ex: TypeError => - // this happens if implicits are ambiguous; try again with more context info. - // println("last ditch effort: "+qual+" . "+name) + def adaptToMemberWithArgs(tree: Tree, qual: Tree, name: Name, mode: Int, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { + def onError(reportError: => Tree): Tree = { context.tree match { - case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => // try handling the arguments - // println("typing args: "+args) + case Apply(tree1, args) if (tree1 eq tree) && args.nonEmpty => silent(_.typedArgs(args, mode)) match { - case args: List[_] => - adaptToArguments(qual, name, args.asInstanceOf[List[Tree]], WildcardType) - case _ => - throw ex + case SilentResultValue(xs) => + val args = xs.asInstanceOf[List[Tree]] + if (args exists (_.isErrorTyped)) + reportError + else + adaptToArguments(qual, name, args, WildcardType, reportAmbiguous, saveErrors) + case _ => + reportError } case _ => - // println("not in an apply: "+context.tree+"/"+tree) - throw ex + reportError } } + silent(_.adaptToMember(qual, HasMember(name), false)) match { + case SilentResultValue(res) => res + case SilentTypeError(err) => onError({if (reportAmbiguous) { context.issue(err) }; setError(tree)}) + } } /** Try to apply an implicit conversion to `qual` to that it contains a @@ -1162,7 +1200,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // If first parent is a trait, make it first mixin and add its superclass as first parent while ((supertpt.tpe.typeSymbol ne null) && supertpt.tpe.typeSymbol.initialize.isTrait) { val supertpt1 = typedType(supertpt) - if (!supertpt1.tpe.isError) { + if (!supertpt1.isErrorTyped) { mixins = supertpt1 :: mixins supertpt = TypeTree(supertpt1.tpe.parents.head) setPos supertpt.pos.focus } @@ -1206,14 +1244,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val cbody2 = newTyper(cscope) // called both during completion AND typing. 
.typePrimaryConstrBody(clazz, cbody1, supertparams, clazz.unsafeTypeParams, vparamss map (_.map(_.duplicate))) + superCall match { case Apply(_, _) => val sarg = treeInfo.firstArgument(superCall) if (sarg != EmptyTree && supertpe.typeSymbol != firstParent) - error(sarg.pos, firstParent+" is a trait; does not take constructor arguments") - if (!supertparams.isEmpty) supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus + ConstrArgsInTraitParentTpeError(sarg, firstParent) + if (!supertparams.isEmpty) + supertpt = TypeTree(cbody2.tpe) setPos supertpt.pos.focus case _ => - if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments") + if (!supertparams.isEmpty) + MissingTypeArgumentsParentTpeError(supertpt) } val preSuperVals = treeInfo.preSuperFields(templ.body) @@ -1223,7 +1264,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { map2(preSuperStats, preSuperVals)((ldef, gdef) => gdef.tpt.tpe = ldef.symbol.tpe) case _ => - if (!supertparams.isEmpty) error(supertpt.pos, "missing type arguments") + if (!supertparams.isEmpty) + MissingTypeArgumentsParentTpeError(supertpt) } /* experimental: early types as type arguments val hasEarlyTypes = templ.body exists (treeInfo.isEarlyTypeDef) @@ -1256,8 +1298,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } catch { case ex: TypeError => - templ.tpe = null - reportTypeError(templ.pos, ex) + // fallback in case of cyclic errors + // @H none of the tests enter here but I couldn't rule it out + ParentTypesError(templ, ex) List(TypeTree(AnyRefClass.tpe)) } @@ -1276,30 +1319,29 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * */ def validateParentClasses(parents: List[Tree], selfType: Type) { + val pending = ListBuffer[AbsTypeError]() def validateParentClass(parent: Tree, superclazz: Symbol) { - if (!parent.tpe.isError) { + if (!parent.isErrorTyped) { val psym = parent.tpe.typeSymbol.initialize checkClassType(parent, false, true) if (psym != superclazz) { if (psym.isTrait) { val ps = psym.info.parents if (!ps.isEmpty && !superclazz.isSubClass(ps.head.typeSymbol)) - error(parent.pos, "illegal inheritance; super"+superclazz+ - "\n is not a subclass of the super"+ps.head.typeSymbol+ - "\n of the mixin " + psym) + pending += ParentSuperSubclassError(parent, superclazz, ps.head.typeSymbol, psym) } else { - error(parent.pos, psym+" needs to be a trait to be mixed in") + pending += ParentNotATraitMixinError(parent, psym) } } - if (psym.isFinal) { - error(parent.pos, "illegal inheritance from final "+psym) - } + if (psym.isFinal) + pending += ParentFinalInheritanceError(parent, psym) + if (psym.isSealed && !phase.erasedTypes) { // AnyVal is sealed, but we have to let the value classes through manually if (context.unit.source.file == psym.sourceFile || isValueClass(context.owner)) psym addChild context.owner else - error(parent.pos, "illegal inheritance from sealed "+psym+": " + context.unit.source.file.canonicalPath + " != " + psym.sourceFile.canonicalPath) + pending += ParentSealedInheritanceError(parent, psym) } if (!(selfType <:< parent.tpe.typeOfThis) && !phase.erasedTypes && @@ -1311,17 +1353,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { //Console.println(context.owner);//DEBUG //Console.println(context.owner.unsafeTypeParams);//DEBUG //Console.println(List.fromArray(context.owner.info.closure));//DEBUG - error(parent.pos, "illegal inheritance;\n self-type "+ - selfType+" does not conform to "+parent + - "'s selftype "+parent.tpe.typeOfThis) + 
pending += ParentSelfTypeConformanceError(parent, selfType) if (settings.explaintypes.value) explainTypes(selfType, parent.tpe.typeOfThis) } if (parents exists (p => p != parent && p.tpe.typeSymbol == psym && !psym.isError)) - error(parent.pos, psym+" is inherited twice") + pending += ParentInheritedTwiceError(parent, psym) } } - - if (!parents.isEmpty && !parents.head.tpe.isError) + if (!parents.isEmpty && parents.forall(!_.isErrorTyped)) for (p <- parents) validateParentClass(p, parents.head.tpe.typeSymbol) /* @@ -1331,13 +1370,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { ", baseclasses = "+(context.owner.info.baseClasses map (_.fullName))+ ", lin = "+(context.owner.info.baseClasses map (context.owner.thisType.baseType))) */ + pending.foreach(ErrorUtils.issueTypeError) } def checkFinitary(classinfo: ClassInfoType) { val clazz = classinfo.typeSymbol + for (tparam <- clazz.typeParams) { if (classinfo.expansiveRefs(tparam) contains tparam) { - error(tparam.pos, "class graph is not finitary because type parameter "+tparam.name+" is expansively recursive") val newinfo = ClassInfoType( classinfo.parents map (_.instantiateTypeParams(List(tparam), List(AnyRefClass.tpe))), classinfo.decls, @@ -1348,6 +1388,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case _ => newinfo } } + FinitaryError(tparam) } } } @@ -1363,8 +1404,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { assert(clazz != NoSymbol) reenterTypeParams(cdef.tparams) val tparams1 = cdef.tparams mapConserve (typedTypeDef) - val impl1 = newTyper(context.make(cdef.impl, clazz, new Scope)) - .typedTemplate(cdef.impl, parentTypes(cdef.impl)) + val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, new Scope)){ + _.typedTemplate(cdef.impl, parentTypes(cdef.impl)) + } val impl2 = finishMethodSynthesis(impl1, clazz, context) if ((clazz != ClassfileAnnotationClass) && (clazz isNonBottomSubClass ClassfileAnnotationClass)) @@ -1397,17 +1439,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val clazz = mdef.symbol.moduleClass val typedMods = removeAnnotations(mdef.mods) assert(clazz != NoSymbol, mdef) - - val typer0 = newTyper(context.make(mdef.impl, clazz, new Scope)) - val impl1 = typer0.typedTemplate(mdef.impl, { - parentTypes(mdef.impl) ++ ( - if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil - else { - clazz.makeSerializable() - List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus) - } - ) - }) + val impl1 = typerReportAnyContextErrors(context.make(mdef.impl, clazz, new Scope)){ + _.typedTemplate(mdef.impl, { + parentTypes(mdef.impl) ++ ( + if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil + else { + clazz.makeSerializable() + List(TypeTree(SerializableClass.tpe) setPos clazz.pos.focus) + } + ) + })} val impl2 = finishMethodSynthesis(impl1, clazz, context) treeCopy.ModuleDef(mdef, typedMods, mdef.name, impl2) setType NoType @@ -1516,14 +1557,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (sym.hasAnnotation(definitions.VolatileAttr)) { if (!sym.isMutable) - error(vdef.pos, "values cannot be volatile") + VolatileValueError(vdef) else if (sym.isFinal) - error(vdef.pos, "final vars cannot be volatile") + FinalVolatileVarError(vdef) } val rhs1 = if (vdef.rhs.isEmpty) { if (sym.isVariable && sym.owner.isTerm && !isPastTyper) - error(vdef.pos, "local variables must be initialized") + LocalVarUninitializedError(vdef) 
vdef.rhs } else { val tpt2 = if (sym.hasDefault) { @@ -1571,16 +1612,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val (superConstr, superArgs) = decompose(rhs) assert(superConstr.symbol ne null)//debug + val pending = ListBuffer[AbsTypeError]() // an object cannot be allowed to pass a reference to itself to a superconstructor // because of initialization issues; bug #473 for (arg <- superArgs ; tree <- arg) { val sym = tree.symbol if (sym != null && (sym.info.baseClasses contains clazz)) { if (sym.isModule) - error(tree.pos, "super constructor cannot be passed a self reference unless parameter is declared by-name") + pending += SuperConstrReferenceError(tree) tree match { case This(qual) => - error(tree.pos, "super constructor arguments cannot reference unconstructed `this`") + pending += SuperConstrArgsThisReferenceError(tree) case _ => () } } @@ -1613,6 +1655,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } } + pending.foreach(ErrorUtils.issueTypeError) } /** Check if a structurally defined method violates implementation restrictions. @@ -1661,7 +1704,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { lookupVariable(name.toString.substring(1), enclClass) match { case Some(repl) => silent(_.typedTypeConstructor(stringParser(repl).typ())) match { - case tpt: Tree => + case SilentResultValue(tpt) => val alias = enclClass.newAliasType(name.toTypeName, useCase.pos) val tparams = cloneSymbolsAtOwner(tpt.tpe.typeSymbol.typeParams, alias) alias setInfo typeFun(tparams, appliedType(tpt.tpe, tparams map (_.tpe))) @@ -1711,7 +1754,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { for (vparams1 <- vparamss1; vparam1 <- vparams1 dropRight 1) if (isRepeatedParamType(vparam1.symbol.tpe)) - error(vparam1.pos, "*-parameter must come last") + StarParamNotLastError(vparam1) var tpt1 = checkNoEscaping.privates(meth, typedType(ddef.tpt)) checkNonCyclic(ddef, tpt1) @@ -1723,7 +1766,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { (!meth.owner.isClass || meth.owner.isModuleClass || meth.owner.isAnonOrRefinementClass)) - error(ddef.pos, "constructor definition not allowed here") + InvalidConstructorDefError(ddef) typed(ddef.rhs) } else if (meth.isMacro) { EmptyTree @@ -1738,30 +1781,26 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (!isPastTyper && meth.owner.isClass && meth.paramss.exists(ps => ps.exists(_.hasDefaultFlag) && isRepeatedParamType(ps.last.tpe))) - error(meth.pos, "a parameter section with a `*'-parameter is not allowed to have default arguments") + StarWithDefaultError(meth) if (!isPastTyper) { val allParams = meth.paramss.flatten for (p <- allParams) { for (n <- p.deprecatedParamName) { if (allParams.exists(p1 => p1.name == n || (p != p1 && p1.deprecatedParamName.exists(_ == n)))) - error(p.pos, "deprecated parameter name "+ n +" has to be distinct from any other parameter name (deprecated or not).") + DeprecatedParamNameError(p, n) } } } if (meth.isStructuralRefinementMember) checkMethodStructuralCompatible(meth) - treeCopy.DefDef(ddef, typedMods, ddef.name, tparams1, vparamss1, tpt1, rhs1) setType NoType } - def typedTypeDef(tdef: TypeDef): TypeDef = { - def typeDefTyper = { - if(tdef.tparams isEmpty) Typer.this - else newTyper(context.makeNewScope(tdef, tdef.symbol)) + def typedTypeDef(tdef: TypeDef): TypeDef = + typerWithCondLocalContext(context.makeNewScope(tdef, tdef.symbol))(tdef.tparams.nonEmpty){ + _.typedTypeDef0(tdef) } - 
typeDefTyper.typedTypeDef0(tdef) - } // call typedTypeDef instead // a TypeDef with type parameters must always be type checked in a new scope @@ -1783,10 +1822,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { checkNonCyclic(tdef.symbol) if (tdef.symbol.owner.isType) rhs1.tpe match { - case TypeBounds(lo1, hi1) => - if (!(lo1 <:< hi1)) - error(tdef.pos, "lower bound "+lo1+" does not conform to upper bound "+hi1) - case _ => + case TypeBounds(lo1, hi1) if (!(lo1 <:< hi1)) => LowerBoundError(tdef, lo1, hi1) + case _ => () } treeCopy.TypeDef(tdef, typedMods, tdef.name, tparams1, rhs1) setType NoType } @@ -1906,9 +1943,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedCase(cdef: CaseDef, pattpe: Type, pt: Type): CaseDef = { // verify no _* except in last position for (Apply(_, xs) <- cdef.pat ; x <- xs dropRight 1 ; if treeInfo isStar x) - error(x.pos, "_* may only come last") + StarPositionInPatternError(x) - val pat1: Tree = typedPattern(cdef.pat, pattpe) + val pat1 = typedPattern(cdef.pat, pattpe) // When case classes have more than two parameter lists, the pattern ends // up typed as a method. We only pattern match on the first parameter // list, so substitute the final result type of the method, i.e. the type @@ -1952,7 +1989,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val codeExpected = !forMSIL && (pt.typeSymbol isNonBottomSubClass CodeClass) if (numVparams > definitions.MaxFunctionArity) - return errorTree(fun, "implementation restricts functions to " + definitions.MaxFunctionArity + " parameters") + return MaxFunctionArityError(fun) def decompose(pt: Type): (Symbol, List[Type], Type) = if ((isFunctionType(pt) @@ -1968,9 +2005,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType) val (clazz, argpts, respt) = decompose(if (codeExpected) pt.normalize.typeArgs.head else pt) - if (argpts.lengthCompare(numVparams) != 0) - errorTree(fun, "wrong number of parameters; expected = " + argpts.length) + WrongNumberOfParametersError(fun, argpts) else { val vparamSyms = map2(fun.vparams, argpts) { (vparam, argpt) => if (vparam.tpt.isEmpty) { @@ -1980,7 +2016,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { fun match { case etaExpansion(vparams, fn, args) if !codeExpected => silent(_.typed(fn, forFunMode(mode), pt)) match { - case fn1: Tree if context.undetparams.isEmpty => + case SilentResultValue(fn1) if context.undetparams.isEmpty => // if context,undetparams is not empty, the function was polymorphic, // so we need the missing arguments to infer its type. 
See #871 //println("typing eta "+fun+":"+fn1.tpe+"/"+context.undetparams) @@ -1991,7 +2027,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } case _ => } - error(vparam.pos, missingParameterTypeMsg(fun, vparam, pt)) + MissingParameterTypeError(fun, vparam, pt) ErrorType } if (!vparam.tpt.pos.isDefined) vparam.tpt setPos vparam.pos.focus @@ -2005,12 +2041,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // for (vparam <- vparams) { // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); () // } - var body = typed(fun.body, respt) + val body1 = typed(fun.body, respt) val formals = vparamSyms map (_.tpe) - val restpe = packedType(body, fun.symbol).deconst.resultType + val restpe = packedType(body1, fun.symbol).deconst.resultType val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe) // body = checkNoEscaping.locals(context.scope, restpe, body) - val fun1 = treeCopy.Function(fun, vparams, body).setType(funtpe) + val fun1 = treeCopy.Function(fun, vparams, body1).setType(funtpe) if (codeExpected) lifted(fun1) else fun1 } } @@ -2025,7 +2061,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { unit.toCheck += { () => // go to next outer context which is not silent, see #3614 var c = context - while (!c.reportGeneralErrors) c = c.outer + while (c.bufferErrors) c = c.outer val stats1 = newTyper(c).typedStats(stats, NoSymbol) for (stat <- stats1 if stat.isDef) { val member = stat.symbol @@ -2041,11 +2077,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Some(imp1: Import) => imp1 case None => log("unhandled import: "+imp+" in "+unit); imp } - private def isWarnablePureExpression(tree: Tree) = tree match { case EmptyTree | Literal(Constant(())) => false case _ => - (treeInfo isExprSafeToInline tree) && { + !tree.isErrorTyped && (treeInfo isExprSafeToInline tree) && { val sym = tree.symbol (sym == null) || !(sym.isModule || sym.isLazy) || { debuglog("'Pure' but side-effecting expression in statement position: " + tree) @@ -2059,9 +2094,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def includesTargetPos(tree: Tree) = tree.pos.isRange && context.unit.exists && (tree.pos includes context.unit.targetPos) val localTarget = stats exists includesTargetPos + val statsErrors = scala.collection.mutable.LinkedHashSet[AbsTypeError]() def typedStat(stat: Tree): Tree = { if (context.owner.isRefinementClass && !treeInfo.isDeclarationOrTypeDef(stat)) - errorTree(stat, "only declarations allowed here") + OnlyDeclarationsError(stat) else stat match { case imp @ Import(_, _) => @@ -2074,20 +2110,25 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // the targetposition stat } else { - val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) this - else newTyper(context.make(stat, exprOwner)) + val localTyper = if (inBlock || (stat.isDef && !stat.isInstanceOf[LabelDef])) { + context.flushBuffer() + this + } else newTyper(context.make(stat, exprOwner)) // XXX this creates a spurious dead code warning if an exception is thrown // in a constructor, even if it is the only thing in the constructor. 
val result = checkDead(localTyper.typed(stat, EXPRmode | BYVALmode, WildcardType)) + if (treeInfo.isSelfOrSuperConstrCall(result)) { context.inConstructorSuffix = true if (treeInfo.isSelfConstrCall(result) && result.symbol.pos.pointOrElse(0) >= exprOwner.enclMethod.pos.pointOrElse(0)) - error(stat.pos, "called constructor's definition must precede calling constructor's definition") + ConstructorsOrderError(stat) } + if (isWarnablePureExpression(result)) context.warning(stat.pos, "a pure expression does nothing in statement position; " + "you may be omitting necessary parentheses" ) + statsErrors ++= localTyper.context.errBuffer result } } @@ -2121,9 +2162,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // error for this is issued in RefChecks.checkDefaultsInOverloaded if (!e.sym.isErroneous && !e1.sym.isErroneous && !e.sym.hasDefaultFlag && !e.sym.hasAnnotation(BridgeClass) && !e1.sym.hasAnnotation(BridgeClass)) { - error(e.sym.pos, e1.sym+" is defined twice"+ - {if(!settings.debug.value) "" else " in "+unit.toString}+ - {if (e.sym.isMacro && e1.sym.isMacro) " \n(note that macros cannot be overloaded)" else ""}) + DefDefinedTwiceError(e.sym, e1.sym) scope.unlink(e1) // need to unlink to avoid later problems with lub; see #2779 } e1 = scope.lookupNextEntry(e1) @@ -2168,14 +2207,20 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { }) ::: newStats.toList } } - val result = stats mapConserve typedStat - if (phase.erasedTypes) result - else checkNoDoubleDefsAndAddSynthetics(result) + + val stats1 = withSavedContext(context) { + val result = stats mapConserve typedStat + context.flushBuffer() + result + } + context.updateBuffer(statsErrors) + if (phase.erasedTypes) stats1 + else checkNoDoubleDefsAndAddSynthetics(stats1) } def typedArg(arg: Tree, mode: Int, newmode: Int, pt: Type): Tree = { val typedMode = onlyStickyModes(mode) | newmode - val t = constrTyperIf((mode & SCCmode) != 0).typed(arg, typedMode, pt) + val t = withCondConstrTyper((mode & SCCmode) != 0)(_.typed(arg, typedMode, pt)) checkDead.inMode(typedMode, t) } @@ -2244,8 +2289,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def doTypedApply(tree: Tree, fun0: Tree, args: List[Tree], mode: Int, pt: Type): Tree = { // TODO_NMT: check the assumption that args nonEmpty - def errTree = setError(treeCopy.Apply(tree, fun0, args)) - def errorTree(msg: String) = { error(tree.pos, msg); errTree } + def duplErrTree = setError(treeCopy.Apply(tree, fun0, args)) + def duplErrorTree(err: AbsTypeError) = { issue(err); duplErrTree } var fun = fun0 if (fun.hasSymbol && fun.symbol.isOverloaded) { @@ -2308,8 +2353,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { arg1 } context.undetparams = undetparams - inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args)) - doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt) + if (context.hasErrors) + setError(tree) + else { + inferMethodAlternative(fun, undetparams, argtpes.toList, pt, varArgsOnly = treeInfo.isWildcardStarArgList(args)) + doTypedApply(tree, adapt(fun, forFunMode(mode), WildcardType), args1, mode, pt) + } case mt @ MethodType(params, _) => val paramTypes = mt.paramTypes @@ -2329,7 +2378,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // the inner "doTypedApply" does "extractUndetparams" => restore when it fails val savedUndetparams = context.undetparams silent(_.doTypedApply(tree, fun, 
tupleArgs, mode, pt)) match { - case t: Tree => + case SilentResultValue(t) => // Depending on user options, may warn or error here if // a Unit or tuple was inserted. Some(t) filter (tupledTree => @@ -2337,7 +2386,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { || tupledTree.symbol == null || checkValidAdaptation(tupledTree, args) ) - case ex => + case _ => context.undetparams = savedUndetparams None } @@ -2352,21 +2401,21 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def tryNamesDefaults: Tree = { val lencmp = compareLengths(args, formals) - if (mt.isErroneous) errTree - else if (inPatternMode(mode)) + if (mt.isErroneous) duplErrTree + else if (inPatternMode(mode)) { // #2064 - errorTree("wrong number of arguments for "+ treeSymTypeMsg(fun)) - else if (lencmp > 0) { - tryTupleApply getOrElse errorTree("too many arguments for "+treeSymTypeMsg(fun)) + duplErrorTree(WrongNumberOfArgsError(tree, fun)) + } else if (lencmp > 0) { + tryTupleApply getOrElse duplErrorTree(TooManyArgsNamesDefaultsError(tree, fun)) } else if (lencmp == 0) { // we don't need defaults. names were used, so this application is transformed // into a block (@see transformNamedApplication in NamesDefaults) val (namelessArgs, argPos) = removeNames(Typer.this)(args, params) if (namelessArgs exists (_.isErroneous)) { - errTree + duplErrTree } else if (!isIdentity(argPos) && !sameLength(formals, params)) // !isIdentity indicates that named arguments are used to re-order arguments - errorTree("when using named arguments, the vararg parameter has to be specified exactly once") + duplErrorTree(MultipleVarargError(tree)) else if (isIdentity(argPos) && !isNamedApplyBlock(fun)) { // if there's no re-ordering, and fun is not transformed, no need to transform // more than an optimization, e.g. important in "synchronized { x = update-x }" @@ -2380,7 +2429,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // calls to the default getters. Example: // foo[Int](a)() ==> foo[Int](a)(b = foo$qual.foo$default$2[Int](a)) val fun1 = transformNamedApplication(Typer.this, mode, pt)(fun, x => x) - if (fun1.isErroneous) errTree + if (fun1.isErroneous) duplErrTree else { assert(isNamedApplyBlock(fun1), fun1) val NamedApplyInfo(qual, targs, previousArgss, _) = context.namedApplyBlockInfo.get._2 @@ -2397,17 +2446,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val lencmp2 = compareLengths(allArgs, formals) if (!sameLength(allArgs, args) && callToCompanionConstr(context, funSym)) { - errorTree("module extending its companion class cannot use default constructor arguments") + duplErrorTree(ModuleUsingCompanionClassDefaultArgsErrror(tree)) } else if (lencmp2 > 0) { removeNames(Typer.this)(allArgs, params) // #3818 - errTree + duplErrTree } else if (lencmp2 == 0) { // useful when a default doesn't match parameter type, e.g. def f[T](x:T="a"); f[Int]() val note = "Error occurred in an application involving default arguments." 
if (!(context.diagnostic contains note)) context.diagnostic = note :: context.diagnostic doTypedApply(tree, if (blockIsEmpty) fun else fun1, allArgs, mode, pt) } else { - tryTupleApply getOrElse errorTree(notEnoughArgumentsMsg(fun, missing)) + tryTupleApply getOrElse duplErrorTree(NotEnoughArgsError(tree, fun, missing)) } } } @@ -2466,7 +2515,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { atPos(tree.pos)(gen.mkNil setType restpe) else constfold(treeCopy.Apply(tree, fun, args1) setType ifPatternSkipFormals(restpe)) - } else if (needsInstantiation(tparams, formals, args)) { //println("needs inst "+fun+" "+tparams+"/"+(tparams map (_.info))) inferExprInstance(fun, tparams) @@ -2488,11 +2536,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (!argtparams.isEmpty) { val strictPt = formal.instantiateTypeParams(tparams, strictTargs) inferArgumentInstance(arg1, argtparams, strictPt, lenientPt) - } - arg1 + arg1 + } else arg1 } val args1 = map2(args, formals)(typedArgToPoly) - if (args1 exists (_.tpe.isError)) errTree + if (args1 exists {_.isErrorTyped}) duplErrTree else { debuglog("infer method inst "+fun+", tparams = "+tparams+", args = "+args1.map(_.tpe)+", pt = "+pt+", lobounds = "+tparams.map(_.tpe.bounds.lo)+", parambounds = "+tparams.map(_.info)) //debug // define the undetparams which have been fixed by this param list, replace the corresponding symbols in "fun" @@ -2509,12 +2557,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { doTypedApply(tree, fun setType fun.tpe.widen, args, mode, pt) case ErrorType => - setError(treeCopy.Apply(tree, fun, args)) + if (!tree.isErrorTyped) setError(tree) else tree + // @H change to setError(treeCopy.Apply(tree, fun, args)) /* --- begin unapply --- */ case otpe if inPatternMode(mode) && unapplyMember(otpe).exists => if (args.length > MaxTupleArity) - error(fun.pos, "too many arguments for unapply pattern, maximum = "+MaxTupleArity) + return duplErrorTree(TooManyArgsPatternError(fun)) // def freshArgType(tp: Type): (List[Symbol], Type) = tp match { @@ -2522,11 +2571,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { (Nil, param.tpe) case PolyType(tparams, restpe) => createFromClonedSymbols(tparams, freshArgType(restpe)._2)((ps, t) => ((ps, t))) + // No longer used, see test case neg/t960.scala (#960 has nothing to do with it) case OverloadedType(_, _) => - error(fun.pos, "cannot resolve overloaded unapply") + OverloadedUnapplyError(fun) (Nil, ErrorType) case _ => - error(fun.pos, "an unapply method must accept a single argument.") + UnapplyWithSingleArgError(fun) (Nil, ErrorType) } @@ -2542,7 +2592,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { freeVars foreach unapplyContext.scope.enter val typer1 = newTyper(unapplyContext) - val pattp = typer1.infer.inferTypedPattern(tree.pos, unappFormal, arg.tpe) + val pattp = typer1.infer.inferTypedPattern(tree, unappFormal, arg.tpe) // turn any unresolved type variables in freevars into existential skolems val skolems = freeVars map (fv => unapplyContext.owner.newExistentialSkolem(fv, fv)) @@ -2553,8 +2603,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // setType null is necessary so that ref will be stabilized; see bug 881 val fun1 = typedPos(fun.pos)(Apply(Select(fun setType null, unapp), List(arg))) - if (fun1.tpe.isErroneous) errTree - else { + if (fun1.tpe.isErroneous) { + duplErrTree + } else { val formals0 = unapplyTypeList(fun1.symbol, fun1.tpe) val 
formals1 = formalTypes(formals0, args.length) if (sameLength(formals1, args)) { @@ -2567,15 +2618,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val itype = glb(List(pt1, arg.tpe)) arg.tpe = pt1 // restore type (arg is a dummy tree, just needs to pass typechecking) UnApply(fun1, args1) setPos tree.pos setType itype - } - else { - errorTree("wrong number of arguments for "+treeSymTypeMsg(fun)) - } + } else + duplErrorTree(WrongNumberArgsPatternError(tree, fun)) } /* --- end unapply --- */ case _ => - errorTree(fun.tpe+" does not take parameters") + duplErrorTree(ApplyWithoutArgsError(tree, fun)) } } @@ -2587,8 +2636,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedAnnotation(ann: Tree, mode: Int = EXPRmode, selfsym: Symbol = NoSymbol, annClass: Symbol = AnnotationClass, requireJava: Boolean = false): AnnotationInfo = { lazy val annotationError = AnnotationInfo(ErrorType, Nil, Nil) var hasError: Boolean = false - def error(pos: Position, msg: String) = { - context.error(pos, msg) + val pending = ListBuffer[AbsTypeError]() + + def reportAnnotationError(err: AbsTypeError) = { + pending += err hasError = true annotationError } @@ -2604,13 +2655,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case tpe => null } } - def fail(msg: String) = { error(tr.pos, msg) ; None } - if (const == null) - fail("annotation argument needs to be a constant; found: " + tr) - else if (const.value == null) - fail("annotation argument cannot be null") - else + if (const == null) { + reportAnnotationError(AnnotationNotAConstantError(tr)); None + } else if (const.value == null) { + reportAnnotationError(AnnotationArgNullError(tr)); None + } else Some(LiteralAnnotArg(const)) } @@ -2619,16 +2669,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { */ def tree2ConstArg(tree: Tree, pt: Type): Option[ClassfileAnnotArg] = tree match { case Apply(Select(New(tpt), nme.CONSTRUCTOR), args) if (pt.typeSymbol == ArrayClass) => - error(tree.pos, "Array constants have to be specified using the `Array(...)' factory method") - None + reportAnnotationError(ArrayConstantsError(tree)); None case ann @ Apply(Select(New(tpt), nme.CONSTRUCTOR), args) => val annInfo = typedAnnotation(ann, mode, NoSymbol, pt.typeSymbol, true) - if (annInfo.atp.isErroneous) { - // recursive typedAnnotation call already printed an error, so don't call "error" - hasError = true - None - } else Some(NestedAnnotArg(annInfo)) + if (annInfo.atp.isErroneous) { hasError = true; None } + else Some(NestedAnnotArg(annInfo)) // use of Array.apply[T: ClassManifest](xs: T*): Array[T] // and Array.apply(x: Int, xs: Int*): Array[Int] (and similar) @@ -2643,13 +2689,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // BT = Int, .., String, Class[_], JavaAnnotClass // T = BT | Array[BT] // So an array literal as argument can only be valid if pt is Array[_] - error(tree.pos, "found array constant, expected argument of type "+ pt) + reportAnnotationError(ArrayConstantsTypeMismatchError(tree, pt)) None } - else - tryConst(tree, pt) + else tryConst(tree, pt) - case Typed(t, _) => tree2ConstArg(t, pt) + case Typed(t, _) => + tree2ConstArg(t, pt) case tree => tryConst(tree, pt) @@ -2669,13 +2715,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Select(New(tpt), nme.CONSTRUCTOR) => (fun, outerArgss) case _ => - error(fun.pos, "unexpected tree in annotation: "+ fun) + reportAnnotationError(UnexpectedTreeAnnotation(fun)) 
(setError(fun), outerArgss) } extract(ann, List()) } - if (fun.isErroneous) annotationError + val res = if (fun.isErroneous) annotationError else { val typedFun @ Select(New(tpt), _) = typed(fun, forFunMode(mode), WildcardType) val annType = tpt.tpe @@ -2685,9 +2731,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // annotation to be saved as java classfile annotation val isJava = typedFun.symbol.owner.isJavaDefined if (!annType.typeSymbol.isNonBottomSubClass(annClass)) { - error(tpt.pos, "expected annotation of type "+ annClass.tpe +", found "+ annType) + reportAnnotationError(AnnotationTypeMismatchError(tpt, annClass.tpe, annType)) } else if (argss.length > 1) { - error(ann.pos, "multiple argument lists on classfile annotation") + reportAnnotationError(MultipleArgumentListForAnnotationError(ann)) } else { val args = if (argss.head.length == 1 && !isNamed(argss.head.head)) @@ -2703,10 +2749,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val sym = if (isJava) annScope.lookup(name) else typedFun.tpe.params.find(p => p.name == name).getOrElse(NoSymbol) if (sym == NoSymbol) { - error(arg.pos, "unknown annotation argument name: " + name) + reportAnnotationError(UnknownAnnotationNameError(arg, name)) (nme.ERROR, None) } else if (!names.contains(sym)) { - error(arg.pos, "duplicate value for annotation argument " + name) + reportAnnotationError(DuplicateValueAnnotationError(arg, name)) (nme.ERROR, None) } else { names -= sym @@ -2715,21 +2761,21 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { (sym.name, annArg) } case arg => - error(arg.pos, "classfile annotation arguments have to be supplied as named arguments") + reportAnnotationError(ClassfileAnnotationsAsNamedArgsError(arg)) (nme.ERROR, None) } for (sym <- names) { // make sure the flags are up to date before erroring (jvm/t3415 fails otherwise) sym.initialize if (!sym.hasAnnotation(AnnotationDefaultAttr) && !sym.hasDefaultFlag) - error(ann.pos, "annotation " + annType.typeSymbol.fullName + " is missing argument " + sym.name) + reportAnnotationError(AnnotationMissingArgError(ann, annType, sym)) } if (hasError) annotationError else AnnotationInfo(annType, List(), nvPairs map {p => (p._1, p._2.get)}).setPos(ann.pos) } } else if (requireJava) { - error(ann.pos, "nested classfile annotations must be defined in java; found: "+ annType) + reportAnnotationError(NestedAnnotationError(ann, annType)) } else { val typedAnn = if (selfsym == NoSymbol) { typed(ann, mode, annClass.tpe) @@ -2781,7 +2827,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { annInfo(fun) case _ => - error(t.pos, "unexpected tree after typing annotation: "+ typedAnn) + reportAnnotationError(UnexpectedTreeAnnotationError(t, typedAnn)) } if (annType.typeSymbol == DeprecatedAttr && argss.flatten.size < 2) @@ -2791,6 +2837,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { else annInfo(typedAnn) } } + + if (hasError) { + pending.foreach(ErrorUtils.issueTypeError) + annotationError + } else res } def isRawParameter(sym: Symbol) = // is it a type parameter leaked by a raw type? 
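Several of the rewritten checks above (validateParentClasses, the super-constructor argument checks, and typedAnnotation's reportAnnotationError) share one shape: failures are appended to a local `pending` buffer while the traversal continues, and only issued once the whole structure has been examined. A self-contained sketch of that shape, using String errors and a toy predicate in place of the real AbsTypeError machinery:

    import scala.collection.mutable.ListBuffer

    object PendingErrorsSketch {
      // Check every item, collecting all failures, then hand them back in one go
      // so the caller can issue them together rather than stopping at the first.
      def validateAll[A](items: List[A])(check: A => Option[String]): List[String] = {
        val pending = ListBuffer[String]()
        for (item <- items; err <- check(item)) pending += err
        pending.toList
      }

      def main(args: Array[String]): Unit = {
        val errors = validateAll(List(1, -2, 3, -4)) { i =>
          if (i < 0) Some("negative value not allowed: " + i) else None
        }
        errors foreach println   // both failures are reported, not just the first
      }
    }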
@@ -2890,7 +2941,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (sym.isAliasType && containsLocal(tp)) apply(tp.normalize) else { if (pre.isVolatile) - context.error(tree.pos, "Inferred type "+tree.tpe+" contains type selection from volatile type "+pre) + InferTypeWithVolatileTypeSelectionError(tree, pre) mapOver(tp) } case _ => @@ -2907,8 +2958,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { localSyms += sym remainingSyms += sym } else { - unit.error(tree.pos, - "can't existentially abstract over parameterized type " + tp) + AbstractExistentiallyOverParamerizedTpeError(tree, tp) } } } @@ -2977,10 +3027,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { new DeSkolemizeMap mapOver tp } - def typedClassOf(tree: Tree, tpt: Tree) = { - checkClassType(tpt, true, false) - atPos(tree.pos)(gen.mkClassOf(tpt.tpe)) - } + def typedClassOf(tree: Tree, tpt: Tree, noGen: Boolean = false) = + if (!checkClassType(tpt, true, false) && noGen) tpt + else atPos(tree.pos)(gen.mkClassOf(tpt.tpe)) protected def typedExistentialTypeTree(tree: ExistentialTypeTree, mode: Int): Tree = { for (wc <- tree.whereClauses) @@ -2989,7 +3038,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val whereClauses1 = typedStats(tree.whereClauses, context.owner) for (vd @ ValDef(_, _, _, _) <- tree.whereClauses) if (vd.symbol.tpe.isVolatile) - error(vd.pos, "illegal abstraction from value with volatile type "+vd.symbol.tpe) + AbstractionFromVolatileTypeError(vd) val tpt1 = typedType(tree.tpt, mode) existentialTransform(tree.whereClauses map (_.symbol), tpt1.tpe)((tparams, tp) => TypeTree(newExistentialType(tparams, tp)) setOriginal tree @@ -3012,7 +3061,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // Martin, I'm using fake trees, because, if you use args or arg.map(typedType), // inferPolyAlternatives loops... -- I have no idea why :-( // ...actually this was looping anyway, see bug #278. - return errorTree(fun, "wrong number of type parameters for "+treeSymTypeMsg(fun)) + return TypedApplyWrongNumberOfTpeParametersError(fun, fun) typedTypeApply(tree, mode, fun, args1) case SingleType(_, _) => @@ -3020,12 +3069,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case PolyType(tparams, restpe) if tparams.nonEmpty => if (sameLength(tparams, args)) { val targs = args map (_.tpe) - checkBounds(tree.pos, NoPrefix, NoSymbol, tparams, targs, "") + checkBounds(tree, NoPrefix, NoSymbol, tparams, targs, "") if (fun.symbol == Predef_classOf) - typedClassOf(tree, args.head) + typedClassOf(tree, args.head, true) else { if (!isPastTyper && fun.symbol == Any_isInstanceOf && !targs.isEmpty) - checkCheckable(tree.pos, targs.head, "") + checkCheckable(tree, targs.head, "") val resultpe = restpe.instantiateTypeParams(tparams, targs) //@M substitution in instantiateParams needs to be careful! 
//@M example: class Foo[a] { def foo[m[x]]: m[a] = error("") } (new Foo[Int]).foo[List] : List[Int] @@ -3036,12 +3085,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { treeCopy.TypeApply(tree, fun, args) setType resultpe } } else { - errorTree(tree, "wrong number of type parameters for "+treeSymTypeMsg(fun)) + TypedApplyWrongNumberOfTpeParametersError(tree, fun) } case ErrorType => setError(treeCopy.TypeApply(tree, fun, args)) case _ => - errorTree(tree, treeSymTypeMsg(fun)+" does not take type parameters.") + TypedApplyDoesNotTakeTpeParametersError(tree, fun) } @inline final def deindentTyping() = context.typingIndentLevel -= 2 @@ -3110,6 +3159,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // this annotation did not need it if (ainfo.isErroneous) + // Erroneous annotations were already reported in typedAnnotation arg1 // simply drop erroneous annotations else { ann.tpe = atype @@ -3149,7 +3199,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { vble = context.owner.newValue(name, tree.pos) if (vble.name.toTermName != nme.WILDCARD) { if ((mode & ALTmode) != 0) - error(tree.pos, "illegal variable in pattern alternative") + VariableInPatternAlternativeError(tree) vble = namer.enterInScope(vble) } val body1 = typed(body, mode, pt) @@ -3176,18 +3226,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedAssign(lhs: Tree, rhs: Tree): Tree = { val lhs1 = typed(lhs, EXPRmode | LHSmode, WildcardType) val varsym = lhs1.symbol - def failMsg = - if (varsym != null && varsym.isValue) "reassignment to val" - else "assignment to non variable" - def fail = { - if (!lhs1.tpe.isError) - error(tree.pos, failMsg) + // see #2494 for double error message example + def fail() = + if (lhs1.isErrorTyped) lhs1 + else AssignmentError(tree, varsym) - setError(tree) - } if (varsym == null) - return fail + return fail() if (treeInfo.mayBeVarGetter(varsym)) { treeInfo.methPart(lhs1) match { @@ -3203,7 +3249,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val rhs1 = typed(rhs, EXPRmode | BYVALmode, lhs1.tpe) treeCopy.Assign(tree, lhs1, checkDead(rhs1)) setType UnitClass.tpe } - else fail + else fail() } def typedIf(cond: Tree, thenp: Tree, elsep: Tree) = { @@ -3273,12 +3319,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { enclMethod.owner.isConstructor || context.enclClass.enclMethod == enclMethod // i.e., we are in a constructor of a local class ) { - errorTree(tree, "return outside method definition") + ReturnOutsideOfDefError(tree) } else { val DefDef(_, name, _, _, restpt, _) = enclMethod.tree - if (restpt.tpe eq null) - errorTree(tree, enclMethod.owner + " has return statement; needs result type") - else { + if (restpt.tpe eq null) { + ReturnWithoutTypeError(tree, enclMethod.owner) + } else { context.enclMethod.returnsSeen = true val expr1: Tree = typed(expr, EXPRmode | BYVALmode, restpt.tpe) // Warn about returning a value if no value can be returned. @@ -3297,12 +3343,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedNew(tpt: Tree) = { val tpt1 = { val tpt0 = typedTypeConstructor(tpt) - checkClassType(tpt0, false, true) - if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) { - context.undetparams = cloneSymbols(tpt0.symbol.typeParams) - TypeTree().setOriginal(tpt0) - .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347. 
- } else tpt0 + if (checkClassType(tpt0, false, true)) + if (tpt0.hasSymbol && !tpt0.symbol.typeParams.isEmpty) { + context.undetparams = cloneSymbols(tpt0.symbol.typeParams) + TypeTree().setOriginal(tpt0) + .setType(appliedType(tpt0.tpe, context.undetparams map (_.tpeHK))) // @PP: tpeHK! #3343, #4018, #4347. + } else tpt0 + else tpt0 } /** If current tree appears in > @@ -3321,17 +3368,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val tp = tpt1.tpe val sym = tp.typeSymbol.initialize if (sym.isAbstractType || sym.hasAbstractFlag) - error(tree.pos, sym + " is abstract; cannot be instantiated") + IsAbstractError(tree, sym) else if (!( tp == sym.thisSym.tpe // when there's no explicit self type -- with (#3612) or without self variable // sym.thisSym.tpe == tp.typeOfThis (except for objects) || narrowRhs(tp) <:< tp.typeOfThis || phase.erasedTypes )) { - error(tree.pos, sym + - " cannot be instantiated because it does not conform to its self-type "+ - tp.typeOfThis) - } - treeCopy.New(tree, tpt1).setType(tp) + DoesNotConformToSelfTypeError(tree, sym, tp.typeOfThis) + } else + treeCopy.New(tree, tpt1).setType(tp) } def typedEta(expr1: Tree): Tree = expr1.tpe match { @@ -3369,22 +3414,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case ErrorType => expr1 case _ => - errorTree(expr1, "_ must follow method; cannot follow " + expr1.tpe) + UnderscoreEtaError(expr1) } /** * @param args ... * @return ... */ - def tryTypedArgs(args: List[Tree], mode: Int, other: TypeError): List[Tree] = { + def tryTypedArgs(args: List[Tree], mode: Int): Option[List[Tree]] = { val c = context.makeSilent(false) c.retyping = true try { - newTyper(c).typedArgs(args, mode) + val res = newTyper(c).typedArgs(args, mode) + if (c.hasErrors) None else Some(res) } catch { - case ex: CyclicReference => throw ex - case ex: TypeError => - null + case ex: CyclicReference => + throw ex + case te: TypeError => + // @H some of typer erros can still leak, + // for instance in continuations + None + } finally { + c.flushBuffer() } } @@ -3393,10 +3444,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { */ def tryTypedApply(fun: Tree, args: List[Tree]): Tree = { val start = startTimer(failedApplyNanos) - silent(_.doTypedApply(tree, fun, args, mode, pt)) match { - case t: Tree => - t - case ex: TypeError => + + def onError(typeError: AbsTypeError): Tree = { stopTimer(failedApplyNanos, start) // If the problem is with raw types, copnvert to existentials and try again. 
@@ -3419,27 +3468,39 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Typed(r, Function(Nil, EmptyTree)) => treesInResult(r) case _ => Nil }) - def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == ex.pos) - val retry = fun :: tree :: args exists errorInResult + def errorInResult(tree: Tree) = treesInResult(tree) exists (_.pos == typeError.errPos) + + val retry = (typeError.errPos != null) && (fun :: tree :: args exists errorInResult) printTyping { val funStr = ptTree(fun) + " and " + (args map ptTree mkString ", ") if (retry) "second try: " + funStr - else "no second try: " + funStr + " because error not in result: " + ex.pos+"!="+tree.pos + else "no second try: " + funStr + " because error not in result: " + typeError.errPos+"!="+tree.pos } if (retry) { val Select(qual, name) = fun - val args1 = tryTypedArgs(args, forArgMode(fun, mode), ex) - val qual1 = - if ((args1 ne null) && !pt.isError) adaptToArguments(qual, name, args1, pt) - else qual - if (qual1 ne qual) { - val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos - return typed1(tree1, mode | SNDTRYmode, pt) + tryTypedArgs(args, forArgMode(fun, mode)) match { + case Some(args1) => + assert((args1.length == 0) || !args1.head.tpe.isErroneous, "try typed args is ok") + val qual1 = + if (!pt.isError) adaptToArguments(qual, name, args1, pt, true, true) + else qual + if (qual1 ne qual) { + val tree1 = Apply(Select(qual1, name) setPos fun.pos, args1) setPos tree.pos + return typed1(tree1, mode | SNDTRYmode, pt) + } + case _ => () } } - reportTypeError(tree.pos, ex) + issue(typeError) setError(treeCopy.Apply(tree, fun, args)) } + + silent(_.doTypedApply(tree, fun, args, mode, pt)) match { + case SilentResultValue(t) => + t + case SilentTypeError(err) => + onError(err) + } } def typedApply(fun: Tree, args: List[Tree]) = { @@ -3451,10 +3512,28 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val funpt = if (isPatternMode) pt else WildcardType val appStart = startTimer(failedApplyNanos) val opeqStart = startTimer(failedOpEqNanos) + + def onError(reportError: => Tree): Tree = { + fun match { + case Select(qual, name) + if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) => + val qual1 = typedQualifier(qual) + if (treeInfo.isVariableOrGetter(qual1)) { + stopTimer(failedOpEqNanos, opeqStart) + convertToAssignment(fun, qual1, name, args) + } else { + stopTimer(failedApplyNanos, appStart) + reportError + } + case _ => + stopTimer(failedApplyNanos, appStart) + reportError + } + } silent(_.typed(fun, forFunMode(mode), funpt), - if ((mode & EXPRmode) != 0) false else context.reportAmbiguousErrors, + if ((mode & EXPRmode) != 0) false else context.ambiguousErrors, if ((mode & EXPRmode) != 0) tree else context.tree) match { - case fun1: Tree => + case SilentResultValue(fun1) => val fun2 = if (stableApplication) stabilizeFun(fun1, mode, pt) else fun1 incCounter(typedApplyCount) def isImplicitMethod(tpe: Type) = tpe match { @@ -3481,7 +3560,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { //if (fun2.hasSymbol && fun2.symbol.name == nme.apply && fun2.symbol.owner == ArrayClass) { // But this causes cyclic reference for Array class in Cleanup. It is easy to overcome this // by calling ArrayClass.info here (or some other place before specialize). 
- if (fun2.symbol == Array_apply) { + if (fun2.symbol == Array_apply && !res.isErrorTyped) { val checked = gen.mkCheckInit(res) // this check is needed to avoid infinite recursion in Duplicators // (calling typed1 more than once for the same tree) @@ -3489,30 +3568,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { else res } else res - case ex: TypeError => - fun match { - case Select(qual, name) - if !isPatternMode && nme.isOpAssignmentName(newTermName(name.decode)) => - val qual1 = typedQualifier(qual) - if (treeInfo.isVariableOrGetter(qual1)) { - stopTimer(failedOpEqNanos, opeqStart) - convertToAssignment(fun, qual1, name, args, ex) - } - else { - stopTimer(failedApplyNanos, appStart) - reportTypeError(fun.pos, ex) - setError(tree) - } - case _ => - stopTimer(failedApplyNanos, appStart) - reportTypeError(fun.pos, ex) - setError(tree) - } + case SilentTypeError(err) => + onError({issue(err); setError(tree)}) } } } - def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree], ex: TypeError): Tree = { + def convertToAssignment(fun: Tree, qual: Tree, name: Name, args: List[Tree]): Tree = { val prefix = name.subName(0, name.length - nme.EQL.length) def mkAssign(vble: Tree): Tree = Assign( @@ -3553,25 +3615,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Apply(fn, indices) => treeInfo.methPart(fn) match { case Select(table, nme.apply) => mkUpdate(table, indices) - case _ => errorTree(qual, "Unexpected tree during assignment conversion.") + case _ => UnexpectedTreeAssignmentConversionError(qual) } } typed1(tree1, mode, pt) -/* - debuglog("retry assign: "+tree1) - silent(_.typed1(tree1, mode, pt)) match { - case t: Tree => - t - case _ => - reportTypeError(tree.pos, ex) - setError(tree) - } -*/ } - def qualifyingClassSym(qual: Name): Symbol = - if (tree.symbol != NoSymbol) tree.symbol else qualifyingClass(tree, qual, false) - def typedSuper(qual: Tree, mix: TypeName) = { val qual1 = typed(qual) @@ -3596,12 +3645,13 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // println(mix) // the reference to super class got lost during erasure restrictionError(tree.pos, unit, "traits may not select fields or methods from super[C] where C is a class") + ErrorType } else { - error(tree.pos, mix+" does not name a parent class of "+clazz) + MixinMissingParentClassNameError(tree, mix, clazz) + ErrorType } - ErrorType } else if (!ps.tail.isEmpty) { - error(tree.pos, "ambiguous parent class qualifier") + AmbiguousParentClassError(tree) ErrorType } else { ps.head @@ -3618,16 +3668,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { findMixinSuper(clazz.tpe) } - treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype) - } + treeCopy.Super(tree, qual1, mix) setType SuperType(clazz.thisType, owntype) + } def typedThis(qual: Name) = { - val clazz = qualifyingClassSym(qual) - if (clazz == NoSymbol) setError(tree) - else { - tree setSymbol clazz setType clazz.thisType.underlying - if (isStableContext(tree, mode, pt)) tree setType clazz.thisType - tree + val qualifyingClassSym = if (tree.symbol != NoSymbol) Some(tree.symbol) else qualifyingClass(tree, qual) + qualifyingClassSym match { + case Some(clazz) => + tree setSymbol clazz setType clazz.thisType.underlying + if (isStableContext(tree, mode, pt)) tree setType clazz.thisType + tree + case None => tree } } @@ -3659,10 +3710,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (sym == NoSymbol && name != 
nme.CONSTRUCTOR && (mode & EXPRmode) != 0) { val qual1 = if (member(qual, name) != NoSymbol) qual - else adaptToMemberWithArgs(tree, qual, name, mode) - if (qual1 ne qual) return typed(treeCopy.Select(tree, qual1, name), mode, pt) - } + else adaptToMemberWithArgs(tree, qual, name, mode, true, true) + if (qual1 ne qual) + return typed(treeCopy.Select(tree, qual1, name), mode, pt) + } if (!reallyExists(sym)) { if (context.owner.toplevelClass.isJavaDefined && name.isTypeName) { val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) } @@ -3691,7 +3743,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { ) } - def makeErrorTree = { + def makeInteractiveErrorTree = { val tree1 = tree match { case Select(_, _) => treeCopy.Select(tree, qual, name) case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) @@ -3700,24 +3752,31 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } if (name == nme.ERROR && forInteractive) - return makeErrorTree + return makeInteractiveErrorTree if (!qual.tpe.widen.isErroneous) { if ((mode & QUALmode) != 0) { val lastTry = missingHook(qual.tpe.typeSymbol, name) if (lastTry != NoSymbol) return typed1(tree setSymbol lastTry, mode, pt) } - notAMemberError(tree.pos, qual, name) + NotAMemberError(tree, qual, name) } - if (forInteractive) makeErrorTree else setError(tree) + if (forInteractive) makeInteractiveErrorTree else setError(tree) } else { val tree1 = tree match { case Select(_, _) => treeCopy.Select(tree, qual, name) case SelectFromTypeTree(_, _) => treeCopy.SelectFromTypeTree(tree, qual, name) } - val (tree2, pre2) = makeAccessible(tree1, sym, qual.tpe, qual) - val result = stabilize(tree2, pre2, mode, pt) + val (result, accessibleError) = silent(_.makeAccessible(tree1, sym, qual.tpe, qual)) match { + case SilentTypeError(err) => + if (err.kind != ErrorKinds.Access) { + context issue err + return setError(tree) + } else (tree1, Some(err)) + case SilentResultValue(treeAndPre) => + (stabilize(treeAndPre._1, treeAndPre._2, mode, pt), None) + } def isPotentialNullDeference() = { !isPastTyper && @@ -3736,16 +3795,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { result, (TypeTreeWithDeferredRefCheck(){ () => val tp = qual.tpe; val sym = tp.typeSymbolDirect // will execute during refchecks -- TODO: make private checkTypeRef in refchecks public and call that one? - checkBounds(qual.pos, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "") + checkBounds(qual, tp.prefix, sym.owner, sym.typeParams, tp.typeArgs, "") qual // you only get to see the wrapped tree after running this check :-p }) setType qual.tpe setPos qual.pos, name) - case accErr: Inferencer#AccessError => - val qual1 = - try adaptToMemberWithArgs(tree, qual, name, mode) - catch { case _: TypeError => qual } - if (qual1 ne qual) typed(Select(qual1, name) setPos tree.pos, mode, pt) - else accErr.emit() + case _ if accessibleError.isDefined => + val qual1 = adaptToMemberWithArgs(tree, qual, name, mode, false, false) + if (!qual1.isErrorTyped && (qual1 ne qual)) + typed(Select(qual1, name) setPos tree.pos, mode, pt) + else { + issue(accessibleError.get) + setError(tree) + } case _ => result } @@ -3753,7 +3814,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // getClass, we have to catch it immediately so expressions // like x.getClass().newInstance() are typed with the type of x. 
val isRefinableGetClass = ( - selection.symbol.name == nme.getClass_ + !selection.isErrorTyped + && selection.symbol.name == nme.getClass_ && selection.tpe.params.isEmpty // TODO: If the type of the qualifier is inaccessible, we can cause private types // to escape scope here, e.g. pos/t1107. I'm not sure how to properly handle this @@ -3761,7 +3823,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { && qual.tpe.typeSymbol.isPublic ) if (isRefinableGetClass) - selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe)) + selection setType MethodType(Nil, erasure.getClassReturnType(qual.tpe)) else selection } @@ -3775,8 +3837,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { * (2) Change imported symbols to selections */ def typedIdent(name: Name): Tree = { - def ambiguousError(msg: String) = - error(tree.pos, "reference to " + name + " is ambiguous;\n" + msg) + var errorContainer: AbsTypeError = null + @inline + def ambiguousError(msg: String) = { + assert(errorContainer == null, "Cannot set ambiguous error twice for identifier") + errorContainer = AmbiguousIdentError(tree, name, msg) + } + @inline + def identError(tree: AbsTypeError) = { + assert(errorContainer == null, "Cannot set ambiguous error twice for identifier") + errorContainer = tree + } var defSym: Symbol = tree.symbol // the directly found symbol var pre: Type = NoPrefix // the prefix type of defSym, if a class member @@ -3896,7 +3967,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { ambiguousError( "it is imported twice in the same scope by\n"+imports.head + "\nand "+imports1.head) } - while (!imports1.isEmpty && + while (errorContainer == null && !imports1.isEmpty && (!imports.head.isExplicitImport(name) || imports1.head.depth == imports.head.depth)) { var impSym1 = imports1.head.importedSymbol(name) @@ -3930,72 +4001,47 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if (inaccessibleSym eq NoSymbol) { // Avoiding some spurious error messages: see SI-2388. if (reporter.hasErrors && (name startsWith tpnme.ANON_CLASS_NAME)) () - else { - // This laborious determination arrived at to keep the tests working. - val calcSimilar = ( - name.length > 2 && ( - startingIdentContext.reportGeneralErrors - || startingIdentContext.enclClassOrMethod.reportGeneralErrors - ) - ) - // avoid calculating if we're in "silent" mode. - // name length check to limit unhelpful suggestions for e.g. 
"x" and "b1" - val similar = { - if (!calcSimilar) "" - else { - val allowed = ( - startingIdentContext.enclosingContextChain - flatMap (ctx => ctx.scope.toList ++ ctx.imports.flatMap(_.allImportedSymbols)) - filter (sym => sym.isTerm == name.isTermName) - filterNot (sym => sym.isPackage || sym.isSynthetic || sym.hasMeaninglessName) - ) - val allowedStrings = ( - allowed.map("" + _.name).distinct.sorted - filterNot (s => (s contains '$') || (s contains ' ')) - ) - similarString("" + name, allowedStrings) - } - } - error(tree.pos, "not found: "+decodeWithKind(name, context.owner) + similar) - } - } - else new AccessError( - tree, inaccessibleSym, context.enclClass.owner.thisType, - inaccessibleExplanation - ).emit() + else identError(SymbolNotFoundError(tree, name, context.owner, startingIdentContext)) + } else + identError(InferErrorGen.AccessError( + tree, inaccessibleSym, context.enclClass.owner.thisType, context.enclClass.owner, + inaccessibleExplanation + )) defSym = context.owner.newErrorSymbol(name) } } } - if (defSym.owner.isPackageClass) - pre = defSym.owner.thisType + if (errorContainer != null) { + ErrorUtils.issueTypeError(errorContainer) + setError(tree) + } else { + if (defSym.owner.isPackageClass) + pre = defSym.owner.thisType - // Inferring classOf type parameter from expected type. - if (defSym.isThisSym) { - typed1(This(defSym.owner) setPos tree.pos, mode, pt) - } - // Inferring classOf type parameter from expected type. Otherwise an - // actual call to the stubbed classOf method is generated, returning null. - else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) - typedClassOf(tree, TypeTree(pt.typeArgs.head)) - else { - val tree1 = ( - if (qual == EmptyTree) tree - // atPos necessary because qualifier might come from startContext - else atPos(tree.pos)(Select(qual, name)) - ) - val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual) - // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right? - stabilize(tree2, pre2, mode, pt) match { - case accErr: Inferencer#AccessError => accErr.emit() - case result => result + // Inferring classOf type parameter from expected type. + if (defSym.isThisSym) { + typed1(This(defSym.owner) setPos tree.pos, mode, pt) + } + // Inferring classOf type parameter from expected type. Otherwise an + // actual call to the stubbed classOf method is generated, returning null. + else if (isPredefMemberNamed(defSym, nme.classOf) && pt.typeSymbol == ClassClass && pt.typeArgs.nonEmpty) + typedClassOf(tree, TypeTree(pt.typeArgs.head)) + else { + val tree1 = ( + if (qual == EmptyTree) tree + // atPos necessary because qualifier might come from startContext + else atPos(tree.pos)(Select(qual, name)) + ) + val (tree2, pre2) = makeAccessible(tree1, defSym, pre, qual) + // assert(pre.typeArgs isEmpty) // no need to add #2416-style check here, right? 
+ stabilize(tree2, pre2, mode, pt) } } } def typedCompoundTypeTree(templ: Template) = { val parents1 = templ.parents mapConserve (typedType(_, mode)) - if (parents1 exists (_.tpe.isError)) tree setType ErrorType + if (parents1 exists (_.isErrorTyped)) tree setType ErrorType else { val decls = new Scope //Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id) @@ -4007,10 +4053,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedAppliedTypeTree(tpt: Tree, args: List[Tree]) = { val tpt1 = typed1(tpt, mode | FUNmode | TAPPmode, WildcardType) - if (tpt1.tpe.isError) { - setError(tree) + if (tpt1.isErrorTyped) { + tpt1 } else if (!tpt1.hasSymbol) { - errorTree(tree, tpt1.tpe+" does not take type parameters") + AppliedTypeNoParametersError(tree, tpt1.tpe) } else { val tparams = tpt1.symbol.typeParams if (sameLength(tparams, args)) { @@ -4042,16 +4088,16 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // wrap the tree and include the bounds check -- refchecks will perform this check (that the beta reduction was indeed allowed) and unwrap // we can't simply use original in refchecks because it does not contains types // (and the only typed trees we have have been mangled so they're not quite the original tree anymore) - checkBounds(result.pos, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "") + checkBounds(result, tpt1.tpe.prefix, tpt1.symbol.owner, tpt1.symbol.typeParams, argtypes, "") result // you only get to see the wrapped tree after running this check :-p } setType (result.tpe) setPos(result.pos) else result } else if (tparams.isEmpty) { - errorTree(tree, tpt1.tpe+" does not take type parameters") + AppliedTypeNoParametersError(tree, tpt1.tpe) } else { //Console.println("\{tpt1}:\{tpt1.symbol}:\{tpt1.symbol.info}") if (settings.debug.value) Console.println(tpt1+":"+tpt1.symbol+":"+tpt1.symbol.info)//debug - errorTree(tree, "wrong number of type arguments for "+tpt1.tpe+", should be "+tparams.length) + AppliedTypeWrongNumberOfArgsError(tree, tpt1, tparams) } } } @@ -4098,8 +4144,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val typer1 = newTyper(context.makeNewScope(tree, context.owner)) for (useCase <- comment.useCases) { typer1.silent(_.typedUseCase(useCase)) match { - case ex: TypeError => - unit.warning(useCase.pos, ex.msg) + case SilentTypeError(err) => + unit.warning(useCase.pos, err.errMsg) case _ => } for (useCaseSym <- useCase.defined) { @@ -4114,17 +4160,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { typedAnnotated(constr, typed(arg, mode, pt)) case tree @ Block(_, _) => - newTyper(context.makeNewScope(tree, context.owner)) - .typedBlock(tree, mode, pt) + typerWithLocalContext(context.makeNewScope(tree, context.owner)){ + _.typedBlock(tree, mode, pt) + } case Alternative(alts) => val alts1 = alts mapConserve (alt => typed(alt, mode | ALTmode, pt)) treeCopy.Alternative(tree, alts1) setType pt case Star(elem) => - checkStarPatOK(tree.pos, mode) - val elem1 = typed(elem, mode, pt) - treeCopy.Star(tree, elem1) setType makeFullyDefined(pt) + if ((mode & STARmode) == 0 && !isPastTyper) + StarPatternWithVarargParametersError(tree) + treeCopy.Star(tree, typed(elem, mode, pt)) setType makeFullyDefined(pt) case Bind(name, body) => typedBind(name, body) @@ -4141,8 +4188,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case tree @ Function(_, _) => if (tree.symbol == NoSymbol) tree.symbol = 
context.owner.newAnonymousFunctionValue(tree.pos) - - newTyper(context.makeNewScope(tree, tree.symbol)).typedFunction(tree, mode, pt) + typerWithLocalContext(context.makeNewScope(tree, tree.symbol))(_.typedFunction(tree, mode, pt)) case Assign(lhs, rhs) => typedAssign(lhs, rhs) @@ -4181,17 +4227,18 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Typed(expr, Function(List(), EmptyTree)) => typedEta(checkDead(typed1(expr, mode, pt))) - case Typed(expr, tpt @ Ident(tpnme.WILDCARD_STAR)) => - val expr0 = typed(expr, onlyStickyModes(mode), WildcardType) + case Typed(expr0, tpt @ Ident(tpnme.WILDCARD_STAR)) => + val expr = typed(expr0, onlyStickyModes(mode), WildcardType) def subArrayType(pt: Type) = if (isValueClass(pt.typeSymbol) || !isFullyDefined(pt)) arrayType(pt) else { val tparam = context.owner freshExistential "" setInfo TypeBounds.upper(pt) newExistentialType(List(tparam), arrayType(tparam.tpe)) } - val (expr1, baseClass) = expr0.tpe.typeSymbol match { - case ArrayClass => (adapt(expr0, onlyStickyModes(mode), subArrayType(pt)), ArrayClass) - case _ => (adapt(expr0, onlyStickyModes(mode), seqType(pt)), SeqClass) + + val (expr1, baseClass) = expr.tpe.typeSymbol match { + case ArrayClass => (adapt(expr, onlyStickyModes(mode), subArrayType(pt)), ArrayClass) + case _ => (adapt(expr, onlyStickyModes(mode), seqType(pt)), SeqClass) } expr1.tpe.baseType(baseClass) match { case TypeRef(_, _, List(elemtp)) => @@ -4203,11 +4250,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case Typed(expr, tpt) => val tpt1 = typedType(tpt, mode) val expr1 = typed(expr, onlyStickyModes(mode), tpt1.tpe.deconst) - val owntype = - if (isPatternMode) inferTypedPattern(tpt1.pos, tpt1.tpe, pt) - else tpt1.tpe - //Console.println(typed pattern: "+tree+":"+", tp = "+tpt1.tpe+", pt = "+pt+" ==> "+owntype)//DEBUG - treeCopy.Typed(tree, expr1, tpt1) setType owntype + val ownType = if (isPatternMode) inferTypedPattern(tpt1, tpt1.tpe, pt) else tpt1.tpe + treeCopy.Typed(tree, expr1, tpt1) setType ownType case TypeApply(fun, args) => // @M: kind-arity checking is done here and in adapt, full kind-checking is in checkKindBounds (in Infer) @@ -4259,12 +4303,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // convert new Array^N[T](len) for N > 1 to evidence[ClassManifest[T]].newArrayN(len) val Some((level, manifType)) = erasure.GenericArray.unapply(tpt.tpe) if (level > MaxArrayDims) - error(tree.pos, "cannot create a generic multi-dimensional array of more than "+MaxArrayDims+" dimensions") - val newArrayApp = atPos(tree.pos) { - val manif = getManifestTree(tree.pos, manifType, false) - new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args) + MultiDimensionalArrayError(tree) + else { + val newArrayApp = atPos(tree.pos) { + val manif = getManifestTree(tree, manifType, false) + new ApplyToImplicitArgs(Select(manif, if (level == 1) "newArray" else "newArray"+level), args) + } + typed(newArrayApp, mode, pt) } - typed(newArrayApp, mode, pt) case tree1 => tree1 } @@ -4295,18 +4341,17 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val tree1 = // temporarily use `filter` and an alternative for `withFilter` if (name == nme.withFilter) silent(_ => typedSelect(qual1, name)) match { - case result1: Tree => - result1 - case ex1: TypeError => + case SilentResultValue(result) => + result + case _ => silent(_ => typed1(Select(qual1, nme.filter) setPos tree.pos, mode, pt)) match { - case result2: Tree 
=> + case SilentResultValue(result2) => unit.deprecationWarning( tree.pos, "`withFilter' method does not yet exist on "+qual1.tpe.widen+ ", using `filter' method instead") result2 - case ex2: TypeError => - reportTypeError(tree.pos, ex1) - setError(tree) + case SilentTypeError(err) => + WithFilterError(tree, err) } } else @@ -4339,8 +4384,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case SelectFromTypeTree(qual, selector) => val qual1 = typedType(qual, mode) - if (qual1.tpe.isVolatile) error(tree.pos, "illegal type selection from volatile type "+qual.tpe) - typedSelect(qual1, selector) + if (qual1.tpe.isVolatile) TypeSelectionFromVolatileTypeError(tree, qual) + else typedSelect(qual1, selector) case CompoundTypeTree(templ) => typedCompoundTypeTree(templ) @@ -4354,7 +4399,9 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { treeCopy.TypeBoundsTree(tree, lo1, hi1) setType TypeBounds(lo1.tpe, hi1.tpe) case etpt @ ExistentialTypeTree(_, _) => - newTyper(context.makeNewScope(tree, context.owner)).typedExistentialTypeTree(etpt, mode) + typerWithLocalContext(context.makeNewScope(tree, context.owner)){ + _.typedExistentialTypeTree(etpt, mode) + } case dc@TypeTreeWithDeferredRefCheck() => dc // TODO: should we re-type the wrapped tree? then we need to change TypeTreeWithDeferredRefCheck's representation to include the wrapped tree explicitly (instead of in its closure) case tpt @ TypeTree() => @@ -4404,7 +4451,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { ptLine("typing %s: pt = %s".format(ptTree(tree), pt), "undetparams" -> context.undetparams, "implicitsEnabled" -> context.implicitsEnabled, - "silent" -> !context.reportGeneralErrors, + "silent" -> context.bufferErrors, "context.owner" -> context.owner ) ) @@ -4424,16 +4471,15 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { tree1, tree1.tpe.widen, pt, context.undetparamsString) ) //DEBUG } - -// for (t <- tree1.tpe) assert(t != WildcardType) -// if ((mode & TYPEmode) != 0) println("type: "+tree1+" has type "+tree1.tpe) if (!isPastTyper) signalDone(context.asInstanceOf[analyzer.Context], tree, result) result } catch { case ex: TypeError => tree.tpe = null + // The only problematic case are (recoverable) cyclic reference errors which can pop up almost anywhere. printTyping("caught %s: while typing %s".format(ex, tree)) //DEBUG - reportTypeError(tree.pos, ex) + + reportTypeError(context, tree.pos, ex) setError(tree) case ex: Exception => if (settings.debug.value) // @M causes cyclic reference error @@ -4455,14 +4501,12 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } - def expandMacro(tree: Tree): Tree = try { - macroExpand(tree) match { - case t: Tree => t - case t => errorTree(tree, "macros must return a compiler-specific tree; returned class is: " + t.getClass) + def expandMacro(tree: Tree): Tree = + macroExpand(tree, context) match { + case Some(t: Tree) => t + case Some(t) => MacroExpandError(tree, t) + case None => setError(tree) // error already reported } - } catch { - case ex: MacroExpandError => errorTree(tree, ex.msg) - } def atOwner(owner: Symbol): Typer = newTyper(context.make(context.tree, owner)) @@ -4547,22 +4591,22 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // to see are those in the signatures. These do not need a unique object as a prefix. // The situation is different for new's and super's, but scalac does not look deep // enough to see those. 
See #3938 - error(tree.pos, restpe.prefix+" is not a legal prefix for a constructor") - } - - //@M fix for #2208 - // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef - if(result.tpe.typeArgs.isEmpty) { - // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) { - // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not - // designed to deal with the cycles in the scala package (ScalaObject extends - // AnyRef, but the AnyRef type alias is entered after the scala package is - // loaded and completed, so that ScalaObject is unpickled while AnyRef is not - // yet defined ) - result setType(restpe) - } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208 - // during uncurry (after refchecks), all types are normalized - result + ConstructorPrefixError(tree, restpe) + } else { + //@M fix for #2208 + // if there are no type arguments, normalization does not bypass any checks, so perform it to get rid of AnyRef + if (result.tpe.typeArgs.isEmpty) { + // minimal check: if(result.tpe.typeSymbolDirect eq AnyRefClass) { + // must expand the fake AnyRef type alias, because bootstrapping (init in Definitions) is not + // designed to deal with the cycles in the scala package (ScalaObject extends + // AnyRef, but the AnyRef type alias is entered after the scala package is + // loaded and completed, so that ScalaObject is unpickled while AnyRef is not + // yet defined ) + result setType(restpe) + } else { // must not normalize: type application must be (bounds-)checked (during RefChecks), see #2208 + // during uncurry (after refchecks), all types are normalized + result + } } } @@ -4586,11 +4630,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { true, false, context) } - def getManifestTree(pos: Position, tp: Type, full: Boolean): Tree = { + def getManifestTree(tree: Tree, tp: Type, full: Boolean): Tree = { val manifestOpt = findManifest(tp, full) if (manifestOpt.tree.isEmpty) { - error(pos, "cannot find "+(if (full) "" else "class ")+"manifest for element type "+tp) - Literal(Constant(null)) + MissingManifestError(tree, full, tp) } else { manifestOpt.tree } diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index 2394925657..27f2ab1d08 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -8,8 +8,6 @@ package api import scala.collection.mutable.ListBuffer -//import scala.tools.nsc.util.{ FreshNameCreator, HashSet, SourceFile } - trait Trees /*extends reflect.generic.Trees*/ { self: Universe => private[scala] var nodeCount = 0 @@ -549,7 +547,7 @@ trait Trees /*extends reflect.generic.Trees*/ { self: Universe => Select(qualifier, sym.name) setSymbol sym /** Identifier */ - case class Ident(name: Name) extends RefTree { } + case class Ident(name: Name) extends RefTree def Ident(name: String): Ident = Ident(newTermName(name)) diff --git a/test/files/buildmanager/t2790/t2790.check b/test/files/buildmanager/t2790/t2790.check index 4e41db4e49..13d61dac42 100644 --- a/test/files/buildmanager/t2790/t2790.check +++ b/test/files/buildmanager/t2790/t2790.check @@ -9,6 +9,5 @@ compiling Set(B.scala) B.scala:2: error: type mismatch; found : Int(5) required: String -Error occurred in an application involving default arguments. 
val y = A.x(5) ^ diff --git a/test/files/neg/sensitive2.check b/test/files/neg/sensitive2.check new file mode 100644 index 0000000000..19152fe188 --- /dev/null +++ b/test/files/neg/sensitive2.check @@ -0,0 +1,10 @@ +sensitive2.scala:6: error: type mismatch; + found : String("abc") + required: Test.Foo[_] +Note that implicit conversions are not applicable because they are ambiguous: + both method foo1 in object Test of type [A](a: A)Test.Foo[A] + and method foo2 in object Test of type (a: Any)Test.Foo[String] + are possible conversion functions from String("abc") to Test.Foo[_] + val a: Foo[_] = "abc" + ^ +one error found diff --git a/test/files/neg/sensitive2.scala b/test/files/neg/sensitive2.scala new file mode 100644 index 0000000000..92b91bef20 --- /dev/null +++ b/test/files/neg/sensitive2.scala @@ -0,0 +1,8 @@ +object Test { + class Foo[A](z: A) + implicit def foo1[A](a: A): Foo[A] = new Foo(a) + implicit def foo2(a: Any): Foo[String] = new Foo("123") + + val a: Foo[_] = "abc" + +} \ No newline at end of file diff --git a/test/files/neg/t1878.check b/test/files/neg/t1878.check index f3a6701d41..128741a022 100644 --- a/test/files/neg/t1878.check +++ b/test/files/neg/t1878.check @@ -6,10 +6,13 @@ t1878.scala:3: error: scrutinee is incompatible with pattern type; required: String val err1 = "" match { case Seq(f @ _*, ',') => f } ^ +t1878.scala:3: error: not found: value f + val err1 = "" match { case Seq(f @ _*, ',') => f } + ^ t1878.scala:9: error: _* may only come last val List(List(_*, arg2), _) = List(List(1,2,3), List(4,5,6)) ^ t1878.scala:13: error: _* may only come last case

{ _* }

=> ^ -four errors found +5 errors found diff --git a/test/files/neg/t2641.check b/test/files/neg/t2641.check index 2056a1b9ab..9e2f02ac47 100644 --- a/test/files/neg/t2641.check +++ b/test/files/neg/t2641.check @@ -1,4 +1,4 @@ -t2641.scala:18: error: illegal cyclic reference involving trait ManagedSeq +t2641.scala:18: error: wrong number of type arguments for ManagedSeq, should be 2 with TraversableViewLike[A, ManagedSeqStrict[A], ManagedSeq[A]] ^ t2641.scala:16: error: illegal inheritance; @@ -13,23 +13,7 @@ t2641.scala:16: error: illegal inheritance; self-type ManagedSeq does not conform to ScalaObject's selftype ScalaObject extends ManagedSeqStrict[A] ^ -t2641.scala:24: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = typer - trait Transformed[+B] extends ManagedSeq[B, Coll] with super.Transformed[B] - ^ -t2641.scala:26: error: something is wrong (wrong class file?): trait ManagedSeq with type parameters [A,Coll] gets applied to arguments [], phase = namer - trait Sliced extends Transformed[A] with super.Sliced { - ^ -t2641.scala:26: error: illegal inheritance; superclass Any - is not a subclass of the superclass ManagedSeqStrict - of the mixin trait Transformed - trait Sliced extends Transformed[A] with super.Sliced { - ^ -t2641.scala:26: error: illegal inheritance; superclass Any - is not a subclass of the superclass Object - of the mixin trait Sliced - trait Sliced extends Transformed[A] with super.Sliced { - ^ t2641.scala:27: error: value managedIterator is not a member of ManagedSeq override def managedIterator = self.managedIterator slice (from, until) ^ -9 errors found +5 errors found diff --git a/test/files/neg/t2918.check b/test/files/neg/t2918.check index 263beab518..aae3045e8a 100644 --- a/test/files/neg/t2918.check +++ b/test/files/neg/t2918.check @@ -1,10 +1,10 @@ t2918.scala:2: error: illegal cyclic reference involving type A - def g[X, A[X] <: A[X]](x: A[X]) = x + def g[X, A[X] <: A[X]](x: A[X]) = x ^ t2918.scala:2: error: cyclic aliasing or subtyping involving type A - def g[X, A[X] <: A[X]](x: A[X]) = x + def g[X, A[X] <: A[X]](x: A[X]) = x ^ t2918.scala:2: error: A does not take type parameters - def g[X, A[X] <: A[X]](x: A[X]) = x + def g[X, A[X] <: A[X]](x: A[X]) = x ^ three errors found diff --git a/test/files/neg/t2918.scala b/test/files/neg/t2918.scala index 03477ccfbf..ff2be39ae0 100755 --- a/test/files/neg/t2918.scala +++ b/test/files/neg/t2918.scala @@ -1,3 +1,3 @@ object Test { - def g[X, A[X] <: A[X]](x: A[X]) = x + def g[X, A[X] <: A[X]](x: A[X]) = x } diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check index 0b394e23d6..53221b7ca0 100644 --- a/test/files/neg/t3015.check +++ b/test/files/neg/t3015.check @@ -3,9 +3,4 @@ t3015.scala:7: error: scrutinee is incompatible with pattern type; required: String val b(foo) = "foo" ^ -t3015.scala:7: error: type mismatch; - found : String with _$1(in object Test) where type +_$1(in object Test) - required: (some other)_$1(in object Test) where type +(some other)_$1(in object Test) - val b(foo) = "foo" - ^ -two errors found +one error found diff --git a/test/files/neg/t649.check b/test/files/neg/t649.check index 5a270d4751..a6670886b5 100644 --- a/test/files/neg/t649.check +++ b/test/files/neg/t649.check @@ -1,4 +1,4 @@ t649.scala:3: error: overloaded method foo needs result type def foo[A] = foo[A] - ^ + ^ one error found -- cgit v1.2.3 From 3c88d6f44a5c08bb003cd8458bfb5a84d3b56c50 Mon Sep 17 00:00:00 2001 From: Eugene 
Burmako
Date: Wed, 25 Jan 2012 14:15:53 +0100
Subject: Scalac fork no longer dumps stacktraces on compilation errors

Current behavior of scalacfork task is to fail the build when there
are compilation errors reported by scalac fork. So far, so good.

However, this functionality is implemented by throwing sys.error,
which makes ant dump the entire stacktrace. This is annoying, since
it almost certainly scrolls the error off the screen (hello, dear
1366x768) and buries it under a meaningless stacktrace.

Surprisingly, there is a very simple fix that remedies the situation.

Credit goes to @bakoyaro from SO: http://bit.ly/xdR306
---
 src/compiler/scala/tools/ant/sabbus/ScalacFork.scala | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
index a39de64c5a..5199e273d7 100644
--- a/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
+++ b/src/compiler/scala/tools/ant/sabbus/ScalacFork.scala
@@ -13,6 +13,7 @@ import java.io.{ File, FileWriter }
 import org.apache.tools.ant.Project
 import org.apache.tools.ant.taskdefs.Java
 import org.apache.tools.ant.util.{ GlobPatternMapper, SourceFileScanner }
+import org.apache.tools.ant.BuildException
 import scala.tools.nsc.io
 import scala.tools.nsc.util.ScalaClassLoader
@@ -150,7 +151,7 @@ class ScalacFork extends ScalaMatchingTask with ScalacShared with TaskArgs {
     val res = execWithArgFiles(java, paths)
     if (failOnError && res != 0)
-      sys.error("Compilation failed because of an internal compiler error;"+
+      throw new BuildException("Compilation failed because of an internal compiler error;"+
       " see the error output for details.")
   }
 }
--
cgit v1.2.3


From eafdc9069676827d79c596afdb493c69aebc8140 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 25 Jan 2012 14:21:53 +0100
Subject: Added doc comments to Names trait.

---
 src/library/scala/reflect/api/Names.scala | 33 ++++++++++++++++++++++++++++---
 1 file changed, 30 insertions(+), 3 deletions(-)

diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala
index e226d2265a..9498f0af36 100755
--- a/src/library/scala/reflect/api/Names.scala
+++ b/src/library/scala/reflect/api/Names.scala
@@ -1,32 +1,59 @@
 package scala.reflect
 package api
+/** A trait that manages names.
+ *  A name is a string in one of two name universes: terms and types.
+ *  The same string can be a name in both universes.
+ *  Two names are equal if they represent the same string and they are
+ *  members of the same universe.
+ *
+ *  Names are interned. That is, for two names `name1` and `name2`,
+ *  `name1 == name2` implies `name1 eq name2`.
+ */
 trait Names {
-
+
+  /** The abstract type of names */
  type Name >: Null <: AbsName
+
+  /** The abstract type of names representing types */
  type TypeName <: Name
+
+  /** The abstract type of names representing terms */
  type TermName <: Name

  abstract class AbsName {
+    /** Is this name a term name? */
    def isTermName: Boolean
+
+    /** Is this name a type name? */
    def isTypeName: Boolean
+
+    /** Returns a term name that represents the same string as this name */
    def toTermName: TermName
+
+    /** Returns a type name that represents the same string as this name */
    def toTypeName: TypeName

-    /** Replace all occurrences of $op_names in this name by corresponding operator symbols.
+    /** Replaces all occurrences of $op_names in this name by corresponding operator symbols.
     *  Example: `foo_+=` becomes `foo_$plus$eq`.
*/ def decode: String - /** Replace all occurrences of operator symbols in this name by corresponding $op_names. + /** Replaces all occurrences of operator symbols in this name by corresponding $op_names. * Example: `foo_$plus$eq` becomes `foo_+=` */ def encode: Name } + /** Create a new term name. + */ def newTermName(s: String): TermName + + /** Creates a new type name. + */ def newTypeName(s: String): TypeName def EmptyTermName: TermName = newTermName("") + def EmptyTypeName: TypeName = EmptyTermName.toTypeName } -- cgit v1.2.3 From 65a1e8bd2dbd796bedc0232615cfc3caf18fd4b3 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 25 Jan 2012 14:30:53 +0100 Subject: Cleanup and better documentation of reflect.api.Symbols trait --- src/library/scala/reflect/api/Symbols.scala | 91 ++++++++++++++++++++--------- 1 file changed, 63 insertions(+), 28 deletions(-) diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala index 8b4b170847..01c1a0f2ae 100755 --- a/src/library/scala/reflect/api/Symbols.scala +++ b/src/library/scala/reflect/api/Symbols.scala @@ -15,7 +15,14 @@ trait Symbols { self: Universe => */ def hasModifier(mod: Modifier.Value): Boolean - /** The owner of this symbol. + /** The owner of this symbol. This is the symbol + * that directly contains the current symbol's definition. + * The `NoSymbol` symbol does not have an owner, and calling this method + * on one causes an internal error. + * The owner of the Scala root class [[scala.reflect.api.mirror.RootClass]] + * and the Scala root object [[scala.reflect.api.mirror.RootPackage]] is `NoSymbol`. + * Every other symbol has a chain of owners that ends in + * [[scala.reflect.api.mirror.RootClass]]. */ def owner: Symbol @@ -74,23 +81,6 @@ trait Symbols { self: Universe => */ def annotations: List[self.AnnotationInfo] - /** The type of the symbol - */ - def tpe: Type - - /** The info of the symbol. This is like tpe, except for class symbols where the `info` - * describes the contents of the class whereas the `tpe` is a reference to the class. - */ - def info: Type - - /** If this symbol is a class or trait, its self type, otherwise the type of the symbol itself - */ - def typeOfThis: Type - - /** The type `C.this`, where `C` is the current class. - */ - def thisType: Type - /** For a class: the module or case class factory with the same name in the same package. * For all others: NoSymbol */ @@ -114,20 +104,43 @@ trait Symbols { self: Universe => /** The top-level class containing this symbol. */ def toplevelClass: Symbol - /** The next enclosing class */ + /** The next enclosing class, or `NoSymbol` if none exists */ def enclClass : Symbol - /** The next enclosing method */ + /** The next enclosing method, or `NoSymbol` if none exists */ def enclMethod : Symbol + /** Does this symbol represent the definition of term? + * Note that every symbol is either a term or a type. + * So for every symbol `sym`, either `sym.isTerm` is true + * or `sym.isType` is true. + */ def isTerm : Boolean + + /** Does this symbol represent the definition of type? + * Note that every symbol is either a term or a type. + * So for every symbol `sym`, either `sym.isTerm` is true + * or `sym.isType` is true. + */ def isType : Boolean + + /** Does this symbol represent the definition of class? + * If yes, `isType` is also guaranteed to be true. + */ def isClass : Boolean + + /** Does this symbol represent the definition of a type alias? + * If yes, `isType` is also guaranteed to be true. 
+   */
  def isAliasType : Boolean
+
+  /** Does this symbol represent the definition of an abstract type?
+   *  If yes, `isType` is also guaranteed to be true.
+   */
  def isAbstractType : Boolean

  /** The type signature of this symbol.
-   *  Note if symbol is a member of a class, one almost always is interested
+   *  Note if the symbol is a member of a class, one almost always is interested
   *  in `typeSigIn` with a site type instead.
   */
  def typeSig: Type
@@ -136,22 +149,44 @@ trait Symbols { self: Universe =>
   */
  def typeSigIn(site: Type): Type

-  /** The type constructor corresponding to this type symbol.
-   */
-  def asTypeConstructor: Type // needed by LiftCode
-
-  /** A type reference that refers to this type symbol
+  /** A type reference that refers to this type symbol
   *  Note if symbol is a member of a class, one almost always is interested
   *  in `asTypeIn` with a site type instead.
+   *
+   *  Example: Given a class declaration `class C[T] { ... }` that generates a symbol
+   *  `C`. Then `C.asType` is the type `C[T]`.
+   *
+   *  By contrast, `C.typeSig` would be a type signature of form
+   *  `PolyType(ClassInfoType(...))` that describes type parameters, value
+   *  parameters, parent types, and members of `C`.
   */
  def asType: Type

-  /** A type reference that refers to this type symbol seen as a member of given type `site`.
+  /** A type reference that refers to this type symbol seen
+   *  as a member of given type `site`.
   */
  def asTypeIn(site: Type): Type

+  /** The type constructor corresponding to this type symbol.
+   *  This is different from `asType` in that type parameters
+   *  are part of results of `asType`, but not of `asTypeConstructor`.
+   *
+   *  Example: Given a class declaration `class C[T] { ... }` that generates a symbol
+   *  `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`.
+   */
+  def asTypeConstructor: Type // needed by LiftCode
+
+  /** If this symbol is a class or trait, its self type, otherwise the type
+   *  of the symbol itself.
+   */
+  def typeOfThis: Type
+
+  /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`.
+   */
+  def thisType: Type
+
  /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has
-   *  the current symbol as its owner.
+   *  the current symbol as its owner.
   */
  def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode
--
cgit v1.2.3


From c749710859d32252291802d55d48abe518ddd118 Mon Sep 17 00:00:00 2001
From: Martin Odersky
Date: Wed, 25 Jan 2012 15:04:53 +0100
Subject: Making reflection thread-safe.

The idea is that all operations that need to be synchronized are
overridden in classes reflect.runtime.Synchronized*. Sometimes this
applies to operations defined in SymbolTable, which can be directly
overridden. Sometimes it is more convenient to generate
SynchronizedClazz subclasses of SymbolTable classes Clazz. In the
latter case, all instance creation must go over factory methods that
can be overridden in the Synchronized traits.
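To make the factory-method idea concrete, here is a minimal, stand-alone
Scala sketch. It is not code from this patch; the names SymTable, Scope,
SynchronizedTable and SynchronizedScope are invented for illustration.
Because clients obtain instances only through the overridable factory,
mixing in the synchronizing trait is enough to make every newly created
instance thread-safe without touching any call sites.

  // Hypothetical sketch of the pattern described above, not the actual sources.
  class SymTable {
    class Scope {
      private var syms: List[String] = Nil
      def enter(sym: String): Unit = { syms = sym :: syms }
      def toList: List[String] = syms
    }
    // All instance creation goes through this factory method ...
    protected def newScope: Scope = new Scope
    def createScope(): Scope = newScope
  }

  trait SynchronizedTable extends SymTable {
    // ... so a synchronizing subclass only has to override the operations ...
    trait SynchronizedScope extends Scope {
      override def enter(sym: String): Unit = synchronized { super.enter(sym) }
      override def toList: List[String] = synchronized { super.toList }
    }
    // ... and the factory, so that every scope it hands out is wrapped.
    override protected def newScope: Scope = new Scope with SynchronizedScope
  }

  // Usage: ThreadSafeTable.createScope() returns a scope whose operations
  // are synchronized, while SymTable clients keep the unsynchronized ones.
  object ThreadSafeTable extends SymTable with SynchronizedTable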
--- .../scala/reflect/internal/BaseTypeSeqs.scala | 47 ++- .../scala/reflect/internal/Definitions.scala | 8 +- .../scala/reflect/internal/Importers.scala | 4 +- src/compiler/scala/reflect/internal/Scopes.scala | 46 +-- .../scala/reflect/internal/SymbolTable.scala | 5 + src/compiler/scala/reflect/internal/Symbols.scala | 121 +++--- src/compiler/scala/reflect/internal/Types.scala | 448 +++++++++++++-------- src/compiler/scala/reflect/runtime/Loaders.scala | 2 +- src/compiler/scala/reflect/runtime/Mirror.scala | 3 +- .../scala/reflect/runtime/SymbolTable.scala | 2 +- .../scala/reflect/runtime/SynchronizedOps.scala | 52 +++ .../reflect/runtime/SynchronizedSymbols.scala | 119 ++++++ .../scala/reflect/runtime/SynchronizedTypes.scala | 87 ++++ src/compiler/scala/reflect/runtime/ToolBoxes.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 2 +- .../scala/tools/nsc/matching/Patterns.scala | 2 +- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 2 +- .../nsc/symtab/classfile/ClassfileParser.scala | 4 +- .../tools/nsc/symtab/classfile/MetaParser.scala | 4 +- .../scala/tools/nsc/symtab/clr/TypeParser.scala | 8 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- .../scala/tools/nsc/transform/Constructors.scala | 2 +- .../scala/tools/nsc/transform/Erasure.scala | 2 +- .../scala/tools/nsc/transform/Flatten.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 6 +- .../tools/nsc/transform/OverridingPairs.scala | 2 +- .../tools/nsc/transform/SpecializeTypes.scala | 4 +- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../scala/tools/nsc/typechecker/Implicits.scala | 2 +- .../scala/tools/nsc/typechecker/Namers.scala | 4 +- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- .../scala/tools/nsc/typechecker/Typers.scala | 6 +- src/detach/plugin/scala/tools/detach/Detach.scala | 4 +- .../scala/reflect/api/StandardDefinitions.scala | 4 +- src/library/scala/reflect/api/Types.scala | 39 +- test/files/run/reflection-implClass.scala | 16 +- 38 files changed, 726 insertions(+), 345 deletions(-) create mode 100644 src/compiler/scala/reflect/runtime/SynchronizedOps.scala create mode 100644 src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala create mode 100644 src/compiler/scala/reflect/runtime/SynchronizedTypes.scala diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala index 38277b5a09..53e89b3d1e 100644 --- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala @@ -29,7 +29,10 @@ trait BaseTypeSeqs { this: SymbolTable => import definitions._ - class BaseTypeSeq(parents: List[Type], elems: Array[Type]) { + protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) = + new BaseTypeSeq(parents, elems) + + class BaseTypeSeq(private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => incCounter(baseTypeSeqCount) incCounter(baseTypeSeqLenTotal, elems.length) @@ -41,7 +44,7 @@ trait BaseTypeSeqs { // (while NoType is in there to indicate a cycle in this BTS, during the execution of // the mergePrefixAndArgs below, the elems get copied without the pending map, // so that NoType's are seen instead of the original type --> spurious compile error) - val pending = new mutable.BitSet(length) + private val pending = new mutable.BitSet(length) /** The type at i'th position in this sequence; lazy 
types are returned evaluated. */ def apply(i: Int): Type = @@ -89,11 +92,11 @@ trait BaseTypeSeqs { /** Return all evaluated types in this sequence as a list */ def toList: List[Type] = elems.toList - protected def copy(head: Type, offset: Int): BaseTypeSeq = { + def copy(head: Type, offset: Int): BaseTypeSeq = { val arr = new Array[Type](elems.length + offset) compat.Platform.arraycopy(elems, 0, arr, offset, elems.length) arr(0) = head - new BaseTypeSeq(parents, arr) + newBaseTypeSeq(parents, arr) } /** Compute new base type sequence with `tp` prepended to this sequence */ @@ -113,21 +116,10 @@ trait BaseTypeSeqs { arr(i) = f(elems(i)) i += 1 } - new BaseTypeSeq(parents, arr) + newBaseTypeSeq(parents, arr) } - def lateMap(f: Type => Type): BaseTypeSeq = new BaseTypeSeq(parents map f, elems) { - override def apply(i: Int) = f(self.apply(i)) - override def rawElem(i: Int) = f(self.rawElem(i)) - override def typeSymbol(i: Int) = self.typeSymbol(i) - override def toList = self.toList map f - override protected def copy(head: Type, offset: Int) = (self map f).copy(head, offset) - override def map(g: Type => Type) = lateMap(g) - override def lateMap(g: Type => Type) = self.lateMap(x => g(f(x))) - override def exists(p: Type => Boolean) = elems exists (x => p(f(x))) - override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max - override def toString = elems.mkString("MBTS(", ",", ")") - } + def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) def exists(p: Type => Boolean): Boolean = elems exists p @@ -177,10 +169,10 @@ trait BaseTypeSeqs { /** A merker object for a base type sequence that's no yet computed. * used to catch inheritance cycles */ - val undetBaseTypeSeq: BaseTypeSeq = new BaseTypeSeq(List(), Array()) + val undetBaseTypeSeq: BaseTypeSeq = newBaseTypeSeq(List(), Array()) /** Create a base type sequence consisting of a single type */ - def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = new BaseTypeSeq(List(), Array(tp)) + def baseTypeSingletonSeq(tp: Type): BaseTypeSeq = newBaseTypeSeq(List(), Array(tp)) /** Create the base type sequence of a compound type wuth given tp.parents */ def compoundBaseTypeSeq(tp: Type): BaseTypeSeq = { @@ -244,8 +236,21 @@ trait BaseTypeSeqs { val elems = new Array[Type](btsSize) buf.copyToArray(elems, 0) // Console.println("computed baseTypeSeq of " + tsym.tpe + " " + parents + ": "+elems.toString)//DEBUG - new BaseTypeSeq(parents, elems) + newBaseTypeSeq(parents, elems) } - + + class MappedBaseTypeSeq(orig: BaseTypeSeq, f: Type => Type) extends BaseTypeSeq(orig.parents map f, orig.elems) { + override def apply(i: Int) = f(orig.apply(i)) + override def rawElem(i: Int) = f(orig.rawElem(i)) + override def typeSymbol(i: Int) = orig.typeSymbol(i) + override def toList = orig.toList map f + override def copy(head: Type, offset: Int) = (orig map f).copy(head, offset) + override def map(g: Type => Type) = lateMap(g) + override def lateMap(g: Type => Type) = orig.lateMap(x => g(f(x))) + override def exists(p: Type => Boolean) = elems exists (x => p(f(x))) + override protected def maxDepthOfElems: Int = elems map (x => maxDpth(f(x))) max + override def toString = elems.mkString("MBTS(", ",", ")") + } + val CyclicInheritance = new Throwable } diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index d38b62cbb4..a733f0d1ee 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ 
-16,7 +16,7 @@ trait Definitions extends reflect.api.StandardDefinitions { private def newClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): Symbol = { val clazz = owner.newClassSymbol(name, NoPosition, flags) - clazz setInfoAndEnter ClassInfoType(parents, new Scope, clazz) + clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) } private def newMethod(owner: Symbol, name: TermName, formals: List[Type], restpe: Type, flags: Long = 0L): Symbol = { val msym = owner.newMethod(name.encode, NoPosition, flags) @@ -206,7 +206,7 @@ trait Definitions extends reflect.api.StandardDefinitions { sealed abstract class BottomClassSymbol(name: TypeName, parent: Symbol) extends ClassSymbol(ScalaPackageClass, NoPosition, name) { locally { this initFlags ABSTRACT | TRAIT | FINAL - this setInfoAndEnter ClassInfoType(List(parent.tpe), new Scope, this) + this setInfoAndEnter ClassInfoType(List(parent.tpe), newScope, this) } final override def isBottomClass = true } @@ -352,7 +352,7 @@ trait Definitions extends reflect.api.StandardDefinitions { ) lazy val EqualsPatternClass = { val clazz = newClass(ScalaPackageClass, tpnme.EQUALS_PATTERN_NAME, Nil) - clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, new Scope, clazz)) + clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, newScope, clazz)) } lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") @@ -823,7 +823,7 @@ trait Definitions extends reflect.api.StandardDefinitions { clazz.setInfo( polyType( List(tparam), - ClassInfoType(List(AnyRefClass.tpe, p), new Scope, clazz))) + ClassInfoType(List(AnyRefClass.tpe, p), newScope, clazz))) } private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol = diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index 53380952c0..23b443919a 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -210,9 +210,9 @@ trait Importers { self: SymbolTable => result } - // !!! todo: override to vcater for PackageScopes + // !!! 
todo: override to cater for PackageScopes def importScope(decls: from.Scope): Scope = - new Scope(decls.toList map importSymbol) + newScopeWith(decls.toList map importSymbol: _*) def importName(name: from.Name): Name = if (name.isTypeName) newTypeName(name.toString) else newTermName(name.toString) diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala index fb3012adff..8861386bc8 100644 --- a/src/compiler/scala/reflect/internal/Scopes.scala +++ b/src/compiler/scala/reflect/internal/Scopes.scala @@ -37,9 +37,14 @@ trait Scopes extends api.Scopes { self: SymbolTable => def unapplySeq(decls: Scope): Some[Seq[Symbol]] = Some(decls.toList) } - class Scope(initElems: ScopeEntry) extends Iterable[Symbol] { + class Scope(initElems: ScopeEntry = null) extends Iterable[Symbol] { + + def this(base: Scope) = { + this(base.elems) + nestinglevel = base.nestinglevel + 1 + } - var elems: ScopeEntry = initElems + private[scala] var elems: ScopeEntry = initElems /** The number of times this scope is nested in another */ @@ -65,20 +70,8 @@ trait Scopes extends api.Scopes { self: SymbolTable => if (size >= MIN_HASH) createHash() - def this() = this(null: ScopeEntry) - - def this(base: Scope) = { - this(base.elems) - nestinglevel = base.nestinglevel + 1 - } - - def this(decls: List[Symbol]) = { - this() - decls foreach enter - } - /** Returns a new scope with the same content as this one. */ - def cloneScope: Scope = new Scope(this.toList) + def cloneScope: Scope = newScopeWith(this.toList: _*) /** is the scope empty? */ override def isEmpty: Boolean = elems eq null @@ -311,7 +304,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => override def foreach[U](p: Symbol => U): Unit = toList foreach p override def filter(p: Symbol => Boolean): Scope = - if (!(toList forall p)) new Scope(toList filter p) else this + if (!(toList forall p)) newScopeWith(toList filter p: _*) else this override def mkString(start: String, sep: String, end: String) = toList.map(_.defString).mkString(start, sep, end) @@ -321,21 +314,26 @@ trait Scopes extends api.Scopes { self: SymbolTable => } /** Create a new scope */ - def newScope: Scope = new Scope + def newScope: Scope = new Scope() + + /** Create a new scope nested in another one with which it shares its elements */ + def newNestedScope(outer: Scope): Scope = new Scope(outer) + + /** Create a new scope with given initial elements */ + def newScopeWith(elems: Symbol*): Scope = { + val scope = newScope + elems foreach scope.enter + scope + } /** Create new scope for the members of package `pkg` */ - def newPackageScope(pkgClass: Symbol): Scope = new Scope + def newPackageScope(pkgClass: Symbol): Scope = newScope /** Transform scope of members of `owner` using operation `op` * This is overridden by the reflective compiler to avoid creating new scopes for packages */ def scopeTransform(owner: Symbol)(op: => Scope): Scope = op - def newScopeWith(elems: Symbol*): Scope = { - val scope = newScope - elems foreach scope.enter - scope - } /** The empty scope (immutable). */ @@ -347,7 +345,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => /** The error scope. 
*/ - class ErrorScope(owner: Symbol) extends Scope(null: ScopeEntry) + class ErrorScope(owner: Symbol) extends Scope private final val maxRecursions = 1000 diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala index ace4d55b90..717693fa1f 100644 --- a/src/compiler/scala/reflect/internal/SymbolTable.scala +++ b/src/compiler/scala/reflect/internal/SymbolTable.scala @@ -271,4 +271,9 @@ abstract class SymbolTable extends api.Universe /** The phase which has given index as identifier. */ val phaseWithId: Array[Phase] + + /** Is this symbol table part of reflexive mirror? In this case + * operations need to be made thread safe. + */ + def inReflexiveMirror = false } diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index b4d2b1531f..ecd2de6f56 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -16,10 +16,13 @@ import api.Modifier trait Symbols extends api.Symbols { self: SymbolTable => import definitions._ - private var ids = 0 + protected var ids = 0 + + val emptySymbolArray = new Array[Symbol](0) + def symbolCount = ids // statistics - val emptySymbolArray = new Array[Symbol](0) + protected def nextId() = { ids += 1; ids } /** Used for deciding in the IDE whether we can interrupt the compiler */ //protected var activeLocks = 0 @@ -31,7 +34,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => private var recursionTable = immutable.Map.empty[Symbol, Int] private var nextexid = 0 - private def freshExistentialName(suffix: String) = { + protected def freshExistentialName(suffix: String) = { nextexid += 1 newTypeName("_" + nextexid + suffix) } @@ -42,6 +45,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => m setModuleClass moduleClass m } + /** Create a new free variable. Its owner is NoSymbol. */ def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar = @@ -77,14 +81,24 @@ trait Symbols extends api.Symbols { self: SymbolTable => type AccessBoundaryType = Symbol type AnnotationType = AnnotationInfo - var rawowner = initOwner - var rawname = initName - var rawflags = 0L - + private[this] var _rawowner = initOwner // Syncnote: need not be protected, as only assignment happens in owner_=, which is not exposed to api + private[this] var _rawname = initName + private[this] var _rawflags = 0L + + def rawowner = _rawowner + def rawname = _rawname + def rawflags = _rawflags + + protected def rawflags_=(x: FlagsType) { _rawflags = x } + private var rawpos = initPos - val id = { ids += 1; ids } // identity displayed when -uniqid + + val id = nextId() // identity displayed when -uniqid - var validTo: Period = NoPeriod + private[this] var _validTo: Period = NoPeriod + + def validTo = _validTo + def validTo_=(x: Period) { _validTo = x} def pos = rawpos def setPos(pos: Position): this.type = { this.rawpos = pos; this } @@ -336,7 +350,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // True if the symbol is unlocked. // True if the symbol is locked but still below the allowed recursion depth. // False otherwise - def lockOK: Boolean = { + private[scala] def lockOK: Boolean = { ((rawflags & LOCKED) == 0L) || ((settings.Yrecursion.value != 0) && (recursionTable get this match { @@ -345,7 +359,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } // Lock a symbol, using the handler if the recursion depth becomes too great. 
- def lock(handler: => Unit) = { + private[scala] def lock(handler: => Unit) = { if ((rawflags & LOCKED) != 0L) { if (settings.Yrecursion.value != 0) { recursionTable get this match { @@ -360,18 +374,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } else { handler } } else { - rawflags |= LOCKED + _rawflags |= LOCKED // activeLocks += 1 // lockedSyms += this } } // Unlock a symbol - def unlock() = { + private[scala] def unlock() = { if ((rawflags & LOCKED) != 0L) { // activeLocks -= 1 // lockedSyms -= this - rawflags = rawflags & ~LOCKED + _rawflags = rawflags & ~LOCKED if (settings.Yrecursion.value != 0) recursionTable -= this } @@ -736,7 +750,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ------ owner attribute -------------------------------------------------------------- def owner: Symbol = rawowner - final def owner_=(owner: Symbol) { + def owner_=(owner: Symbol) { // don't keep the original owner in presentation compiler runs // (the map will grow indefinitely, and the only use case is the // backend). @@ -744,8 +758,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => if (originalOwner contains this) () else originalOwner(this) = rawowner } - - rawowner = owner + assert(!inReflexiveMirror, "owner_= is not thread-safe; cannot be run in reflexive code") + _rawowner = owner } def ownerChain: List[Symbol] = this :: owner.ownerChain @@ -778,7 +792,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def name: Name = rawname - final def name_=(name: Name) { + def name_=(name: Name) { if (name != rawname) { if (owner.isClass) { var ifs = owner.infos @@ -787,7 +801,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => ifs = ifs.prev } } - rawname = name + _rawname = name } } @@ -855,20 +869,20 @@ trait Symbols extends api.Symbols { self: SymbolTable => val fs = rawflags & phase.flagMask (fs | ((fs & LateFlags) >>> LateShift)) & ~(fs >>> AntiShift) } - final def flags_=(fs: Long) = rawflags = fs + def flags_=(fs: Long) = _rawflags = fs /** Set the symbol's flags to the given value, asserting * that the previous value was 0. */ def initFlags(mask: Long): this.type = { assert(rawflags == 0L, this) - rawflags = mask + _rawflags = mask this } - def setFlag(mask: Long): this.type = { rawflags = rawflags | mask ; this } - def resetFlag(mask: Long): this.type = { rawflags = rawflags & ~mask ; this } + def setFlag(mask: Long): this.type = { _rawflags = rawflags | mask ; this } + def resetFlag(mask: Long): this.type = { _rawflags = rawflags & ~mask ; this } final def getFlag(mask: Long): Long = flags & mask - final def resetFlags() { rawflags = rawflags & TopLevelCreationFlags } + final def resetFlags() { _rawflags = rawflags & TopLevelCreationFlags } /** Does symbol have ANY flag in `mask` set? */ final def hasFlag(mask: Long): Boolean = (flags & mask) != 0L @@ -954,7 +968,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => throw CyclicReference(this, tp) } } else { - rawflags |= LOCKED + _rawflags |= LOCKED // activeLocks += 1 // lockedSyms += this } @@ -984,7 +998,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => assert(info ne null) infos = TypeHistory(currentPeriod, info, null) unlock() - validTo = if (info.isComplete) currentPeriod else NoPeriod + _validTo = if (info.isComplete) currentPeriod else NoPeriod } /** Set initial info. */ @@ -1003,11 +1017,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** Set new info valid from start of this phase. 
*/ - final def updateInfo(info: Type): Symbol = { + def updateInfo(info: Type): Symbol = { assert(phaseId(infos.validFrom) <= phase.id) if (phaseId(infos.validFrom) == phase.id) infos = infos.prev infos = TypeHistory(currentPeriod, info, infos) - validTo = if (info.isComplete) currentPeriod else NoPeriod + _validTo = if (info.isComplete) currentPeriod else NoPeriod this } @@ -1045,11 +1059,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => infos = TypeHistory(currentPeriod + 1, info1, infos) this.infos = infos } - validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform + _validTo = currentPeriod + 1 // to enable reads from same symbol during info-transform itr = itr.next } - validTo = if (itr.pid == NoPhase.id) curPeriod - else period(currentRunId, itr.pid) + _validTo = if (itr.pid == NoPhase.id) curPeriod + else period(currentRunId, itr.pid) } } finally { phase = current @@ -1060,7 +1074,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => } // adapt to new run in fsc. - private def adaptInfos(infos: TypeHistory): TypeHistory = + private def adaptInfos(infos: TypeHistory): TypeHistory = { + assert(!inReflexiveMirror) if (infos == null || runId(infos.validFrom) == currentRunId) { infos } else { @@ -1069,7 +1084,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => else { val pid = phaseId(infos.validFrom) - validTo = period(currentRunId, pid) + _validTo = period(currentRunId, pid) phase = phaseWithId(pid) val info1 = ( @@ -1085,6 +1100,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } } + } /** Initialize the symbol */ final def initialize: this.type = { @@ -1094,6 +1110,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Was symbol's type updated during given phase? */ final def isUpdatedAt(pid: Phase#Id): Boolean = { + assert(!inReflexiveMirror) var infos = this.infos while ((infos ne null) && phaseId(infos.validFrom) != pid + 1) infos = infos.prev infos ne null @@ -1101,6 +1118,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Was symbol's type updated during given phase? */ final def hasTypeAt(pid: Phase#Id): Boolean = { + assert(!inReflexiveMirror) var infos = this.infos while ((infos ne null) && phaseId(infos.validFrom) > pid) infos = infos.prev infos ne null @@ -1212,7 +1230,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def reset(completer: Type) { resetFlags() infos = null - validTo = NoPeriod + _validTo = NoPeriod //limit = NoPhase.id setInfo(completer) } @@ -1239,7 +1257,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => // ----- annotations ------------------------------------------------------------ // null is a marker that they still need to be obtained. - private var _annotations: List[AnnotationInfo] = Nil + private[this] var _annotations: List[AnnotationInfo] = Nil def annotationsString = if (annotations.isEmpty) "" else annotations.mkString("(", ", ", ")") @@ -2053,8 +2071,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def name: TermName = rawname.toTermName privateWithin = NoSymbol - var referenced: Symbol = NoSymbol - + private[this] var _referenced: Symbol = NoSymbol + + def referenced: Symbol = _referenced + def referenced_=(x: Symbol) { _referenced = x } + def existentialBound = singletonBounds(this.tpe) def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = @@ -2226,7 +2247,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class of type symbols. 
Alias and abstract types are direct instances * of this class. Classes are instances of a subclass. */ - sealed abstract class TypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) { + abstract class TypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) { privateWithin = NoSymbol private var tyconCache: Type = null private var tyconRunId = NoRunId @@ -2395,9 +2416,9 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class for class symbols */ class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) extends TypeSymbol(initOwner, initPos, initName) { - private var flatname: TypeName = null - private var source: AbstractFileType = null - private var thissym: Symbol = this + private[this] var flatname: TypeName = null + private[this] var source: AbstractFileType = null + private[this] var thissym: Symbol = this final override def isClass = true final override def isNonClassType = false @@ -2459,7 +2480,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } typeOfThisCache } - else thissym.tpe + else thisSym.tpe } /** Sets the self type of the class */ @@ -2479,7 +2500,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def sourceModule = if (isModuleClass) companionModule else NoSymbol - private var childSet: Set[Symbol] = Set() + private[this] var childSet: Set[Symbol] = Set() override def children = childSet override def addChild(sym: Symbol) { childSet = childSet + sym } @@ -2523,10 +2544,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** An object representing a missing symbol */ - object NoSymbol extends Symbol(null, NoPosition, nme.NO_NAME) { - setInfo(NoType) - privateWithin = this - override def info_=(info: Type) { + class NoSymbol extends Symbol(null, NoPosition, nme.NO_NAME) { + synchronized { + setInfo(NoType) + privateWithin = this + } + override def info_=(info: Type) = { infos = TypeHistory(1, NoType, null) unlock() validTo = currentPeriod @@ -2553,6 +2576,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def originalEnclosingMethod = this } + protected def makeNoSymbol = new NoSymbol + + lazy val NoSymbol = makeNoSymbol + /** Derives a new list of symbols from the given list by mapping the given * list across the given function. Then fixes the info of all the new symbols * by substituting the new symbols for the original symbols. diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 73f1f3db84..4f4715498e 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -109,15 +109,19 @@ trait Types extends api.Types { self: SymbolTable => /** A log of type variable with their original constraints. Used in order * to undo constraints in the case of isSubType/isSameType failure. */ - object undoLog { - private type UndoLog = List[(TypeVar, TypeConstraint)] - private[scala] var log: UndoLog = List() - + lazy val undoLog = newUndoLog + + protected def newUndoLog = new UndoLog + + class UndoLog { + private type UndoPairs = List[(TypeVar, TypeConstraint)] + private var log: UndoPairs = List() + // register with the auto-clearing cache manager perRunCaches.recordCache(this) /** Undo all changes to constraints to type variables upto `limit`. 
*/ - private def undoTo(limit: UndoLog) { + private def undoTo(limit: UndoPairs) { while ((log ne limit) && log.nonEmpty) { val (tv, constr) = log.head tv.constr = constr @@ -125,9 +129,14 @@ trait Types extends api.Types { self: SymbolTable => } } - private[Types] def record(tv: TypeVar) = { + /** No sync necessary, because record should only + * be called from within a undo or undoUnless block, + * which is already synchronized. + */ + private[reflect] def record(tv: TypeVar) = { log ::= ((tv, tv.constr.cloneInternal)) } + private[scala] def clear() { if (settings.debug.value) self.log("Clearing " + log.size + " entries from the undoLog.") @@ -249,8 +258,7 @@ trait Types extends api.Types { self: SymbolTable => abstract class AbsTypeImpl extends AbsType { this: Type => def declaration(name: Name): Symbol = decl(name) def nonPrivateDeclaration(name: Name): Symbol = nonPrivateDecl(name) - def allDeclarations = decls - def allMembers = members + def declarations = decls def typeArguments = typeArgs def erasedType = transformedType(this) } @@ -873,16 +881,7 @@ trait Types extends api.Types { self: SymbolTable => * after `maxTostringRecursions` recursion levels. Uses `safeToString` * to produce a string on each level. */ - override def toString: String = - if (tostringRecursions >= maxTostringRecursions) - "..." - else - try { - tostringRecursions += 1 - safeToString - } finally { - tostringRecursions -= 1 - } + override def toString: String = typeToString(this) /** Method to be implemented in subclasses. * Converts this type to a string in calling toString for its parts. @@ -992,7 +991,9 @@ trait Types extends api.Types { self: SymbolTable => if (membertpe eq null) membertpe = self.memberType(member) (membertpe matches self.memberType(sym)) })) { - members = new Scope(List(member, sym)) + members = newScope + members enter member + members enter sym } } else { var prevEntry = members.lookupEntry(sym.name) @@ -1105,7 +1106,7 @@ trait Types extends api.Types { self: SymbolTable => /** A base class for types that represent a single value * (single-types and this-types). 
*/ - abstract class SingletonType extends SubType with SimpleTypeProxy with AbsSingletonType { + abstract class SingletonType extends SubType with SimpleTypeProxy { def supertype = underlying override def isTrivial = false override def isStable = true @@ -1231,18 +1232,15 @@ trait Types extends api.Types { self: SymbolTable => override val isTrivial: Boolean = pre.isTrivial // override def isNullable = underlying.isNullable override def isNotNull = underlying.isNotNull - private var underlyingCache: Type = NoType - private var underlyingPeriod = NoPeriod + private[reflect] var underlyingCache: Type = NoType + private[reflect] var underlyingPeriod = NoPeriod override def underlying: Type = { - val period = underlyingPeriod - if (period != currentPeriod) { - underlyingPeriod = currentPeriod - if (!isValid(period)) { - underlyingCache = pre.memberType(sym).resultType; - assert(underlyingCache ne this, this) - } + val cache = underlyingCache + if (underlyingPeriod == currentPeriod && cache != null) cache + else { + defineUnderlyingOfSingleType(this) + underlyingCache } - underlyingCache } // more precise conceptually, but causes cyclic errors: (paramss exists (_ contains sym)) @@ -1281,6 +1279,17 @@ trait Types extends api.Types { self: SymbolTable => unique(new UniqueSingleType(pre, sym)) } } + + protected def defineUnderlyingOfSingleType(tpe: SingleType) = { + val period = tpe.underlyingPeriod + if (period != currentPeriod) { + tpe.underlyingPeriod = currentPeriod + if (!isValid(period)) { + tpe.underlyingCache = tpe.pre.memberType(tpe.sym).resultType; + assert(tpe.underlyingCache ne tpe, tpe) + } + } + } abstract case class SuperType(thistpe: Type, supertpe: Type) extends SingletonType { override val isTrivial: Boolean = thistpe.isTrivial && supertpe.isTrivial @@ -1333,105 +1342,30 @@ trait Types extends api.Types { self: SymbolTable => */ abstract class CompoundType extends Type { - var baseTypeSeqCache: BaseTypeSeq = _ - private var baseTypeSeqPeriod = NoPeriod - private var baseClassesCache: List[Symbol] = _ - private var baseClassesPeriod = NoPeriod + private[reflect] var baseTypeSeqCache: BaseTypeSeq = _ + private[reflect] var baseTypeSeqPeriod = NoPeriod + private[reflect] var baseClassesCache: List[Symbol] = _ + private[reflect] var baseClassesPeriod = NoPeriod override def baseTypeSeq: BaseTypeSeq = { - val period = baseTypeSeqPeriod; - if (period != currentPeriod) { // no caching in IDE - baseTypeSeqPeriod = currentPeriod - if (!isValidForBaseClasses(period)) { - if (parents.exists(_.exists(_.isInstanceOf[TypeVar]))) { - // rename type vars to fresh type params, take base type sequence of - // resulting type, and rename back all the entries in that sequence - var tvs = Set[TypeVar]() - for (p <- parents) - for (t <- p) t match { - case tv: TypeVar => tvs += tv - case _ => - } - val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> tv.origin.typeSymbol.cloneSymbol) toMap - val paramToVarMap = varToParamMap map (_.swap) - val varToParam = new TypeMap { - def apply(tp: Type) = varToParamMap get tp match { - case Some(sym) => sym.tpe - case _ => mapOver(tp) - } - } - val paramToVar = new TypeMap { - def apply(tp: Type) = tp match { - case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym) - case _ => mapOver(tp) - } - } - val bts = copyRefinedType(this.asInstanceOf[RefinedType], parents map varToParam, varToParam mapOver decls).baseTypeSeq - baseTypeSeqCache = bts lateMap paramToVar - } else { - incCounter(compoundBaseTypeSeqCount) - baseTypeSeqCache = 
undetBaseTypeSeq - baseTypeSeqCache = if (typeSymbol.isRefinementClass) - memo(compoundBaseTypeSeq(this))(_.baseTypeSeq updateHead typeSymbol.tpe) - else - compoundBaseTypeSeq(this) - // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors - // when compiling with - // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala - // I have not yet figured out precisely why this is the case. - // My current assumption is that taking memos forces baseTypeSeqs to be computed - // at stale types (i.e. the underlying typeSymbol has already another type). - // I do not yet see precisely why this would cause a problem, but it looks - // fishy in any case. - } - } - //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG + val cached = baseTypeSeqCache + if (baseTypeSeqPeriod == currentPeriod && cached != null && cached != undetBaseTypeSeq) + cached + else { + defineBaseTypeSeqOfCompoundType(this) + baseTypeSeqCache } - if (baseTypeSeqCache eq undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + typeSymbol) - baseTypeSeqCache } override def baseTypeSeqDepth: Int = baseTypeSeq.maxDepth override def baseClasses: List[Symbol] = { - def computeBaseClasses: List[Symbol] = - if (parents.isEmpty) List(typeSymbol) - else { - //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG - // optimized, since this seems to be performance critical - val superclazz = parents.head - var mixins = parents.tail - val sbcs = superclazz.baseClasses - var bcs = sbcs - def isNew(clazz: Symbol): Boolean = ( - superclazz.baseTypeIndex(clazz) < 0 && - { var p = bcs; - while ((p ne sbcs) && (p.head != clazz)) p = p.tail; - p eq sbcs - } - ); - while (!mixins.isEmpty) { - def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] = - if (mbcs.isEmpty) bcs - else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail) - else addMixinBaseClasses(mbcs.tail); - bcs = addMixinBaseClasses(mixins.head.baseClasses) - mixins = mixins.tail - } - typeSymbol :: bcs - } - val period = baseClassesPeriod - if (period != currentPeriod) { - baseClassesPeriod = currentPeriod - if (!isValidForBaseClasses(period)) { - baseClassesCache = null - baseClassesCache = memo(computeBaseClasses)(typeSymbol :: _.baseClasses.tail) - } + val cached = baseClassesCache + if (baseClassesPeriod == currentPeriod && cached != null) cached + else { + defineBaseClassesOfCompoundType(this) + baseClassesCache } - if (baseClassesCache eq null) - throw new TypeError("illegal cyclic reference involving " + typeSymbol) - baseClassesCache } /** The slightly less idiomatic use of Options is due to @@ -1475,6 +1409,97 @@ trait Types extends api.Types { self: SymbolTable => (if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) decls.mkString("{", "; ", "}") else "") } + + protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = { + val period = tpe.baseTypeSeqPeriod; + if (period != currentPeriod) { + tpe.baseTypeSeqPeriod = currentPeriod + if (!isValidForBaseClasses(period)) { + if (tpe.parents.exists(_.exists(_.isInstanceOf[TypeVar]))) { + // rename type vars to fresh type params, take base type sequence of + // resulting type, and rename back all the entries in that sequence + var tvs = Set[TypeVar]() + for (p <- tpe.parents) + for (t <- p) t match { + case tv: TypeVar => tvs += tv + case _ => + } + val varToParamMap: Map[Type, Symbol] = tvs map (tv => tv -> 
tv.origin.typeSymbol.cloneSymbol) toMap + val paramToVarMap = varToParamMap map (_.swap) + val varToParam = new TypeMap { + def apply(tp: Type) = varToParamMap get tp match { + case Some(sym) => sym.tpe + case _ => mapOver(tp) + } + } + val paramToVar = new TypeMap { + def apply(tp: Type) = tp match { + case TypeRef(_, tsym, _) if paramToVarMap.isDefinedAt(tsym) => paramToVarMap(tsym) + case _ => mapOver(tp) + } + } + val bts = copyRefinedType(tpe.asInstanceOf[RefinedType], tpe.parents map varToParam, varToParam mapOver tpe.decls).baseTypeSeq + tpe.baseTypeSeqCache = bts lateMap paramToVar + } else { + incCounter(compoundBaseTypeSeqCount) + tpe.baseTypeSeqCache = undetBaseTypeSeq + tpe.baseTypeSeqCache = if (tpe.typeSymbol.isRefinementClass) + tpe.memo(compoundBaseTypeSeq(tpe))(_.baseTypeSeq updateHead tpe.typeSymbol.tpe) + else + compoundBaseTypeSeq(tpe) + // [Martin] suppressing memo-ization solves the problem with "same type after erasure" errors + // when compiling with + // scalac scala.collection.IterableViewLike.scala scala.collection.IterableLike.scala + // I have not yet figured out precisely why this is the case. + // My current assumption is that taking memos forces baseTypeSeqs to be computed + // at stale types (i.e. the underlying typeSymbol has already another type). + // I do not yet see precisely why this would cause a problem, but it looks + // fishy in any case. + } + } + } + //Console.println("baseTypeSeq(" + typeSymbol + ") = " + baseTypeSeqCache.toList);//DEBUG + if (tpe.baseTypeSeqCache eq undetBaseTypeSeq) + throw new TypeError("illegal cyclic inheritance involving " + tpe.typeSymbol) + } + + protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = { + def computeBaseClasses: List[Symbol] = + if (tpe.parents.isEmpty) List(tpe.typeSymbol) + else { + //Console.println("computing base classes of " + typeSymbol + " at phase " + phase);//DEBUG + // optimized, since this seems to be performance critical + val superclazz = tpe.parents.head + var mixins = tpe.parents.tail + val sbcs = superclazz.baseClasses + var bcs = sbcs + def isNew(clazz: Symbol): Boolean = + superclazz.baseTypeIndex(clazz) < 0 && + { var p = bcs; + while ((p ne sbcs) && (p.head != clazz)) p = p.tail; + p eq sbcs + } + while (!mixins.isEmpty) { + def addMixinBaseClasses(mbcs: List[Symbol]): List[Symbol] = + if (mbcs.isEmpty) bcs + else if (isNew(mbcs.head)) mbcs.head :: addMixinBaseClasses(mbcs.tail) + else addMixinBaseClasses(mbcs.tail) + bcs = addMixinBaseClasses(mixins.head.baseClasses) + mixins = mixins.tail + } + tpe.typeSymbol :: bcs + } + val period = tpe.baseClassesPeriod + if (period != currentPeriod) { + tpe.baseClassesPeriod = currentPeriod + if (!isValidForBaseClasses(period)) { + tpe.baseClassesCache = null + tpe.baseClassesCache = tpe.memo(computeBaseClasses)(tpe.typeSymbol :: _.baseClasses.tail) + } + } + if (tpe.baseClassesCache eq null) + throw new TypeError("illegal cyclic reference involving " + tpe.typeSymbol) + } /** A class representing intersection types with refinements of the form * ` with ... with { decls }` @@ -1583,7 +1608,7 @@ trait Types extends api.Types { self: SymbolTable => * by a path which contains at least one expansive reference. 
* @See Kennedy, Pierce: On Decidability of Nominal Subtyping with Variance */ - def expansiveRefs(tparam: Symbol) = { + private[scala] def expansiveRefs(tparam: Symbol) = { if (state == UnInitialized) { computeRefs() while (state != Initialized) propagate() @@ -1597,10 +1622,16 @@ trait Types extends api.Types { self: SymbolTable => /** The type parameters which are referenced type parameters of this class. * Two entries: refs(0): Non-expansive references * refs(1): Expansive references + * Syncnote: This var need not be protected with synchronized, because + * it is accessed only from expansiveRefs, which is called only from + * Typer. */ private var refs: Array[RefMap] = _ /** The initialization state of the class: UnInialized --> Initializing --> Initialized + * Syncnote: This var need not be protected with synchronized, because + * it is accessed only from expansiveRefs, which is called only from + * Typer. */ private var state = UnInitialized @@ -1750,6 +1781,10 @@ trait Types extends api.Types { self: SymbolTable => } } + /* Syncnote: The `volatile` var and `pendingVolatiles` mutable set need not be protected + * with synchronized, because they are accessed only from isVolatile, which is called only from + * Typer. + */ private var volatileRecursions: Int = 0 private val pendingVolatiles = new mutable.HashSet[Symbol] @@ -1831,13 +1866,18 @@ trait Types extends api.Types { self: SymbolTable => if (sym == clazz) this else transform(sym.info.baseType(clazz)) } + trait NonClassTypeRef extends TypeRef { require(sym.isNonClassType, sym) + /* Syncnote: These are pure caches for performance; no problem to evaluate these + * several times. Hence, no need to protected with synchronzied in a mutli-threaded + * usage scenario. + */ private var relativeInfoCache: Type = _ private var memberInfoCache: Type = _ - private def relativeInfo = { + private[Types] def relativeInfo = { val memberInfo = pre.memberInfo(sym) if (relativeInfoCache == null || (memberInfo ne memberInfoCache)) { memberInfoCache = memberInfo @@ -1846,25 +1886,27 @@ trait Types extends api.Types { self: SymbolTable => relativeInfoCache } - override def baseType(clazz: Symbol): Type = ( - if (sym == clazz) this else try { - basetypeRecursions += 1 - if (basetypeRecursions < LogPendingBaseTypesThreshold) - relativeInfo.baseType(clazz) - else if (pendingBaseTypes contains this) - if (clazz == AnyClass) clazz.tpe else NoType - else - try { - pendingBaseTypes += this - relativeInfo.baseType(clazz) - } finally { - pendingBaseTypes -= this - } + override def baseType(clazz: Symbol): Type = + if (sym == clazz) this else baseTypeOfNonClassTypeRef(this, clazz) + } + + protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = try { + basetypeRecursions += 1 + if (basetypeRecursions < LogPendingBaseTypesThreshold) + tpe.relativeInfo.baseType(clazz) + else if (pendingBaseTypes contains tpe) + if (clazz == AnyClass) clazz.tpe else NoType + else + try { + pendingBaseTypes += tpe + tpe.relativeInfo.baseType(clazz) } finally { - basetypeRecursions -= 1 + pendingBaseTypes -= tpe } - ) + } finally { + basetypeRecursions -= 1 } + trait AliasTypeRef extends NonClassTypeRef { require(sym.isAliasType, sym) @@ -1912,6 +1954,8 @@ trait Types extends api.Types { self: SymbolTable => trait AbstractTypeRef extends NonClassTypeRef { require(sym.isAbstractType, sym) + /** Syncnote: Pure performance caches; no need to synchronize in multi-threaded environment + */ private var symInfoCache: Type = _ private var thisInfoCache: Type = _ @@ 
-1938,6 +1982,7 @@ trait Types extends api.Types { self: SymbolTable => volatileRecursions -= 1 } } + override def thisInfo = { val symInfo = sym.info if (thisInfoCache == null || (symInfo ne symInfoCache)) { @@ -1955,7 +2000,7 @@ trait Types extends api.Types { self: SymbolTable => override def isStable = bounds.hi.typeSymbol isSubClass SingletonClass override def bounds = thisInfo.bounds // def transformInfo(tp: Type): Type = appliedType(tp.asSeenFrom(pre, sym.owner), typeArgsOrDummies) - override protected def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this + override protected[Types] def baseTypeSeqImpl: BaseTypeSeq = transform(bounds.hi).baseTypeSeq prepend this } /** A class for named types of the form @@ -1966,11 +2011,11 @@ trait Types extends api.Types { self: SymbolTable => * @M: a higher-kinded type is represented as a TypeRef with sym.typeParams.nonEmpty, but args.isEmpty */ abstract case class TypeRef(pre: Type, sym: Symbol, args: List[Type]) extends Type { - private var parentsCache: List[Type] = _ - private var parentsPeriod = NoPeriod - private var baseTypeSeqCache: BaseTypeSeq = _ - private var baseTypeSeqPeriod = NoPeriod - private var normalized: Type = _ + private[reflect] var parentsCache: List[Type] = _ + private[reflect] var parentsPeriod = NoPeriod + private[reflect] var baseTypeSeqCache: BaseTypeSeq = _ + private[reflect] var baseTypeSeqPeriod = NoPeriod + private var normalized: Type = _ // @M: propagate actual type params (args) to `tp`, by replacing // formal type parameters with actual ones. If tp is higher kinded, @@ -2030,16 +2075,12 @@ trait Types extends api.Types { self: SymbolTable => sym.isModuleClass || sym == NothingClass || isValueClass(sym) || super.isNotNull override def parents: List[Type] = { - val period = parentsPeriod - if (period != currentPeriod) { - parentsPeriod = currentPeriod - if (!isValidForBaseClasses(period)) { - parentsCache = thisInfo.parents map transform - } else if (parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641 - parentsCache = List(AnyClass.tpe) - } + val cache = parentsCache + if (parentsPeriod == currentPeriod && cache != null) cache + else { + defineParentsOfTypeRef(this) + parentsCache } - parentsCache } override def decls: Scope = { @@ -2051,21 +2092,16 @@ trait Types extends api.Types { self: SymbolTable => thisInfo.decls } - protected def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform + protected[Types] def baseTypeSeqImpl: BaseTypeSeq = sym.info.baseTypeSeq map transform override def baseTypeSeq: BaseTypeSeq = { - val period = baseTypeSeqPeriod - if (period != currentPeriod) { - baseTypeSeqPeriod = currentPeriod - if (!isValidForBaseClasses(period)) { - incCounter(typerefBaseTypeSeqCount) - baseTypeSeqCache = undetBaseTypeSeq - baseTypeSeqCache = baseTypeSeqImpl - } + val cache = baseTypeSeqCache + if (baseTypeSeqPeriod == currentPeriod && cache != null && cache != undetBaseTypeSeq) + cache + else { + defineBaseTypeSeqOfTypeRef(this) + baseTypeSeqCache } - if (baseTypeSeqCache == undetBaseTypeSeq) - throw new TypeError("illegal cyclic inheritance involving " + sym) - baseTypeSeqCache } private def preString = ( @@ -2151,6 +2187,32 @@ trait Types extends api.Types { self: SymbolTable => } }) } + + protected def defineParentsOfTypeRef(tpe: TypeRef) = { + val period = tpe.parentsPeriod + if (period != currentPeriod) { + tpe.parentsPeriod = currentPeriod + if (!isValidForBaseClasses(period)) { + tpe.parentsCache = 
tpe.thisInfo.parents map tpe.transform + } else if (tpe.parentsCache == null) { // seems this can happen if things are corrupted enough, see #2641 + tpe.parentsCache = List(AnyClass.tpe) + } + } + } + + protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = { + val period = tpe.baseTypeSeqPeriod + if (period != currentPeriod) { + tpe.baseTypeSeqPeriod = currentPeriod + if (!isValidForBaseClasses(period)) { + incCounter(typerefBaseTypeSeqCount) + tpe.baseTypeSeqCache = undetBaseTypeSeq + tpe.baseTypeSeqCache = tpe.baseTypeSeqImpl + } + } + if (tpe.baseTypeSeqCache == undetBaseTypeSeq) + throw new TypeError("illegal cyclic inheritance involving " + tpe.sym) + } /** A class representing a method type with parameters. * Note that a parameterless method is represented by a NullaryMethodType: @@ -2577,7 +2639,12 @@ trait Types extends api.Types { self: SymbolTable => override def typeArgs: List[Type] = Nil override def isHigherKinded = false - /** The constraint associated with the variable */ + /** The constraint associated with the variable + * Syncnote: Type variables are assumed to be used from only one + * thread. They are not exposed in api.Types and are used only locally + * in operations that are exposed from types. Hence, no syncing of `constr` + * or `encounteredHigherLevel` or `suspended` accesses should be necessary. + */ var constr = constr0 def instValid = constr.instValid @@ -3048,7 +3115,7 @@ trait Types extends api.Types { self: SymbolTable => * @return ... */ def refinedType(parents: List[Type], owner: Symbol): Type = - refinedType(parents, owner, new Scope, owner.pos) + refinedType(parents, owner, newScope, owner.pos) def copyRefinedType(original: RefinedType, parents: List[Type], decls: Scope) = if ((parents eq original.parents) && (decls eq original.decls)) original @@ -3338,7 +3405,7 @@ trait Types extends api.Types { self: SymbolTable => private var uniques: util.HashSet[Type] = _ private var uniqueRunId = NoRunId - private def unique[T <: Type](tp: T): T = { + protected def unique[T <: Type](tp: T): T = { incCounter(rawTypeCount) if (uniqueRunId != currentRunId) { uniques = util.HashSet[Type]("uniques", initialUniquesCapacity) @@ -3362,6 +3429,12 @@ trait Types extends api.Types { self: SymbolTable => def this(lo0: List[Type], hi0: List[Type]) = this(lo0, hi0, NoType, NoType) def this(bounds: TypeBounds) = this(List(bounds.lo), List(bounds.hi)) def this() = this(List(), List()) + + /* Syncnote: Type constraints are assumed to be used from only one + * thread. They are not exposed in api.Types and are used only locally + * in operations that are exposed from types. Hence, no syncing of any + * variables should be ncessesary. 
+ */ /** Guard these lists against AnyClass and NothingClass appearing, * else loBounds.isEmpty will have different results for an empty @@ -3634,7 +3707,7 @@ trait Types extends api.Types { self: SymbolTable => val elems = scope.toList val elems1 = mapOver(elems) if (elems1 eq elems) scope - else new Scope(elems1) + else newScopeWith(elems1: _*) } /** Map this function over given list of symbols */ @@ -3699,6 +3772,11 @@ trait Types extends api.Types { self: SymbolTable => def traverse(tp: Type): Unit def apply(tp: Type): Type = { traverse(tp); tp } } + + abstract class TypeTraverserWithResult[T] extends TypeTraverser { + def result: T + def clear(): Unit + } abstract class TypeCollector[T](initial: T) extends TypeTraverser { var result: T = _ @@ -3753,7 +3831,7 @@ trait Types extends api.Types { self: SymbolTable => * the conversion of raw types to existential types might not have taken place * in ClassFileparser.sigToType (where it is usually done). */ - object rawToExistential extends TypeMap { + def rawToExistential = new TypeMap { private var expanded = immutable.Set[Symbol]() private var generated = immutable.Set[Type]() def apply(tp: Type): Type = tp match { @@ -4365,15 +4443,20 @@ trait Types extends api.Types { self: SymbolTable => private def commonOwner(tps: List[Type]): Symbol = { if (tps.isEmpty) NoSymbol else { - commonOwnerMap.result = null + commonOwnerMap.clear() tps foreach (commonOwnerMap traverse _) val result = if (commonOwnerMap.result ne null) commonOwnerMap.result else NoSymbol debuglog(tps.mkString("commonOwner(", ", ", ") == " + result)) result } } - private object commonOwnerMap extends TypeTraverser { + + protected def commonOwnerMap: CommonOwnerMap = commonOwnerMapObj + + protected class CommonOwnerMap extends TypeTraverserWithResult[Symbol] { var result: Symbol = _ + + def clear() { result = null } private def register(sym: Symbol) { // First considered type is the trivial result. @@ -4390,12 +4473,15 @@ trait Types extends api.Types { self: SymbolTable => case _ => mapOver(tp) } } + + private lazy val commonOwnerMapObj = new CommonOwnerMap class MissingAliasControl extends ControlThrowable val missingAliasException = new MissingAliasControl class MissingTypeControl extends ControlThrowable object adaptToNewRunMap extends TypeMap { + private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = { if (phase.flatClasses) { sym @@ -5832,7 +5918,7 @@ trait Types extends api.Types { self: SymbolTable => } /** The least upper bound wrt <:< of a list of types */ - def lub(ts: List[Type], depth: Int): Type = { + private def lub(ts: List[Type], depth: Int): Type = { def lub0(ts0: List[Type]): Type = elimSub(ts0, depth) match { case List() => NothingClass.tpe case List(t) => t @@ -5983,7 +6069,7 @@ trait Types extends api.Types { self: SymbolTable => /** The greatest lower bound wrt <:< of a list of types, which have been normalized * wrt elimSuper */ - private def glbNorm(ts: List[Type], depth: Int): Type = { + protected def glbNorm(ts: List[Type], depth: Int): Type = { def glb0(ts0: List[Type]): Type = ts0 match { case List() => AnyClass.tpe case List(t) => t @@ -6279,7 +6365,7 @@ trait Types extends api.Types { self: SymbolTable => private var indent: String = "" /** Perform operation `p` on arguments `tp1`, `arg2` and print trace of computation. 
*/ - private def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = { + protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = { Console.println(indent + tp1 + " " + op + " " + arg2 + "?" /* + "("+tp1.getClass+","+arg2.getClass+")"*/) indent = indent + " " val result = p(tp1, arg2) @@ -6324,4 +6410,16 @@ trait Types extends api.Types { self: SymbolTable => final val maxTostringRecursions = 50 private var tostringRecursions = 0 + + protected def typeToString(tpe: Type): String = + if (tostringRecursions >= maxTostringRecursions) + "..." + else + try { + tostringRecursions += 1 + tpe.safeToString + } finally { + tostringRecursions -= 1 + } + } diff --git a/src/compiler/scala/reflect/runtime/Loaders.scala b/src/compiler/scala/reflect/runtime/Loaders.scala index 7aca052fa9..0a5a21de1e 100644 --- a/src/compiler/scala/reflect/runtime/Loaders.scala +++ b/src/compiler/scala/reflect/runtime/Loaders.scala @@ -97,7 +97,7 @@ trait Loaders { self: SymbolTable => 0 < dp && dp < (name.length - 1) } - class PackageScope(pkgClass: Symbol) extends Scope { + class PackageScope(pkgClass: Symbol) extends Scope() with SynchronizedScope { assert(pkgClass.isType) private var negatives = mutable.Set[Name]() override def lookupEntry(name: Name): ScopeEntry = { diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala index 9490dc4ad7..47fc9f2dcf 100644 --- a/src/compiler/scala/reflect/runtime/Mirror.scala +++ b/src/compiler/scala/reflect/runtime/Mirror.scala @@ -40,7 +40,8 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe override def typeToClass(tpe: Type): java.lang.Class[_] = typeToJavaClass(tpe) override def symbolToClass(sym: Symbol): java.lang.Class[_] = classToJava(sym) - + + override def inReflexiveMirror = true } object Mirror extends Mirror diff --git a/src/compiler/scala/reflect/runtime/SymbolTable.scala b/src/compiler/scala/reflect/runtime/SymbolTable.scala index d1a806bcef..5331f0a53e 100644 --- a/src/compiler/scala/reflect/runtime/SymbolTable.scala +++ b/src/compiler/scala/reflect/runtime/SymbolTable.scala @@ -6,7 +6,7 @@ package runtime * It can be used either from the reflexive mirror itself (class Universe), or else from * a runtime compiler that uses reflection to get a class information (class scala.tools.nsc.ReflectGlobal) */ -trait SymbolTable extends internal.SymbolTable with JavaToScala with ScalaToJava with Loaders { +trait SymbolTable extends internal.SymbolTable with JavaToScala with ScalaToJava with Loaders with SynchronizedOps { /** If `owner` is a package class (but not the empty package) and `name` is a term name, make a new package * ., otherwise return NoSymbol. 
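
The Types.scala changes above all follow the same scheme: each lazily computed field (underlyingCache, parentsCache, baseTypeSeqCache, baseClassesCache) keeps a lock-free fast path in its public accessor, while the code that actually fills the cache moves into an overridable define... method that the runtime mirror can bracket in synchronized (see SynchronizedTypes below). The following is a minimal, self-contained sketch of that discipline; the names (Tpe, Table, baseNames and so on) are hypothetical stand-ins, not the compiler's own classes.

    object CachedInfoSketch {

      // Sentinel playing the role of undetBaseTypeSeq: a value meaning "not yet computed".
      private val Undefined: List[String] = List("<undefined>")

      // Stands in for a Type carrying its own cache fields (cf. parentsCache/parentsPeriod).
      class Tpe(val parents: List[String]) {
        private[CachedInfoSketch] var cache: List[String] = Undefined
        private[CachedInfoSketch] var cachePeriod: Int = -1
      }

      // Stands in for internal.Types: lock-free fast path, overridable slow path.
      class Table {
        def currentPeriod: Int = 0

        final def baseNames(tpe: Tpe): List[String] = {
          val cached = tpe.cache
          if (tpe.cachePeriod == currentPeriod && (cached ne Undefined)) cached // no lock taken
          else { defineBaseNames(tpe); tpe.cache }                              // fill the cache
        }

        protected def defineBaseNames(tpe: Tpe) {
          if (tpe.cachePeriod != currentPeriod) {
            tpe.cachePeriod = currentPeriod
            tpe.cache = tpe.parents.map(_.toUpperCase)   // the "expensive" computation
          }
        }
      }

      // Stands in for SynchronizedTypes: only the define step is locked.
      class SynchronizedTable extends Table {
        override protected def defineBaseNames(tpe: Tpe) {
          tpe.synchronized { super.defineBaseNames(tpe) }
        }
      }

      def main(args: Array[String]) {
        val table = new SynchronizedTable
        val tpe = new Tpe(List("a", "b"))
        println(table.baseNames(tpe))   // List(A, B), computed once
        println(table.baseNames(tpe))   // served from the cache, without locking
      }
    }

Because a stale or sentinel cache value only ever sends a reader into the synchronized define step, concurrent readers can at worst recompute the same value; that is the trade-off the comment at the end of SynchronizedTypes describes.
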
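The three new files that follow (SynchronizedOps.scala, SynchronizedSymbols.scala, SynchronizedTypes.scala) share a second pattern: factory methods such as newScope or newTermSymbol are overridden so that, in the runtime mirror only, they return instances carrying a Synchronized* mixin whose overrides wrap each super call in synchronized { ... }. A rough, self-contained sketch of the idea, again with hypothetical stand-in names rather than the real compiler classes:

    object SynchronizedMixinSketch {

      // Stands in for internal.Scopes#Scope: a plain, unsynchronized implementation.
      class ScopeLike {
        private var syms: List[String] = Nil
        def enter(sym: String) { syms ::= sym }
        def toList: List[String] = syms
      }

      // Stands in for SynchronizedScope: every override just brackets the super call.
      trait SynchronizedScopeLike extends ScopeLike {
        override def enter(sym: String) { synchronized { super.enter(sym) } }
        override def toList: List[String] = synchronized { super.toList }
      }

      // Stands in for internal.SymbolTable with its newScope factory.
      trait Ops {
        def inReflexiveMirror: Boolean = false      // compiler default: no locking needed
        def newScope: ScopeLike = new ScopeLike
      }

      // Stands in for SynchronizedOps, mixed in only by the runtime mirror.
      trait SynchronizedOpsLike extends Ops {
        override def inReflexiveMirror = true
        override def newScope: ScopeLike = new ScopeLike with SynchronizedScopeLike
      }

      def main(args: Array[String]) {
        val mirror = new Ops with SynchronizedOpsLike {}
        val scope = mirror.newScope                 // thread-safe via the mixin
        scope.enter("foo")
        println(scope.toList)                       // List(foo)
      }
    }

Keeping the locking in mixins means the batch compiler continues to use the plain, unsynchronized classes and pays no synchronization cost; only the reflective mirror, which reports inReflexiveMirror = true, opts in.
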
diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala new file mode 100644 index 0000000000..98694c2ddf --- /dev/null +++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala @@ -0,0 +1,52 @@ +package scala.reflect +package runtime + +trait SynchronizedOps extends internal.SymbolTable + with SynchronizedSymbols + with SynchronizedTypes { self: SymbolTable => + +// Names + + private lazy val nameLock = new Object + + override def newTermName(s: String): TermName = nameLock.synchronized { super.newTermName(s) } + override def newTypeName(s: String): TypeName = nameLock.synchronized { super.newTypeName(s) } + +// BaseTypeSeqs + + override protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) = + new BaseTypeSeq(parents, elems) with SynchronizedBaseTypeSeq + + trait SynchronizedBaseTypeSeq extends BaseTypeSeq { + override def apply(i: Int): Type = synchronized { super.apply(i) } + override def rawElem(i: Int) = synchronized { super.rawElem(i) } + override def typeSymbol(i: Int): Symbol = synchronized { super.typeSymbol(i) } + override def toList: List[Type] = synchronized { super.toList } + override def copy(head: Type, offset: Int): BaseTypeSeq = synchronized { super.copy(head, offset) } + override def map(f: Type => Type): BaseTypeSeq = synchronized { super.map(f) } + override def exists(p: Type => Boolean): Boolean = synchronized { super.exists(p) } + override lazy val maxDepth = synchronized { maxDepthOfElems } + override def toString = synchronized { super.toString } + + override def lateMap(f: Type => Type): BaseTypeSeq = new MappedBaseTypeSeq(this, f) with SynchronizedBaseTypeSeq + } + +// Scopes + + override def newScope = new Scope() with SynchronizedScope + override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope +// override def newScopeWith(elems: ScopeEntry): Scope = new Scope(elems) with SynchronizedScope + + trait SynchronizedScope extends Scope { + override def isEmpty: Boolean = synchronized { super.isEmpty } + override def size: Int = synchronized { super.size } + override def enter(sym: Symbol) = synchronized { super.enter(sym) } + override def rehash(sym: Symbol, newname: Name) = synchronized { super.rehash(sym, newname) } + override def unlink(e: ScopeEntry) = synchronized { super.unlink(e) } + override def unlink(sym: Symbol) = synchronized { super.unlink(sym) } + override def lookupAll(name: Name) = synchronized { super.lookupAll(name) } + override def lookupEntry(name: Name) = synchronized { super.lookupEntry(name) } + override def lookupNextEntry(entry: ScopeEntry) = synchronized { super.lookupNextEntry(entry) } + override def toList: List[Symbol] = synchronized { super.toList } + } +} diff --git a/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala new file mode 100644 index 0000000000..9baf94f71d --- /dev/null +++ b/src/compiler/scala/reflect/runtime/SynchronizedSymbols.scala @@ -0,0 +1,119 @@ +package scala.reflect +package runtime + +import internal.Flags.DEFERRED + +trait SynchronizedSymbols extends internal.Symbols { self: SymbolTable => + + override protected def nextId() = synchronized { super.nextId() } + + override protected def freshExistentialName(suffix: String) = + synchronized { super.freshExistentialName(suffix) } + + // Set the fields which point companions at one another. Returns the module. 
+ override def connectModuleToClass(m: ModuleSymbol, moduleClass: ClassSymbol): ModuleSymbol = + synchronized { super.connectModuleToClass(m, moduleClass) } + + override def newFreeVar(name: TermName, tpe: Type, value: Any, newFlags: Long = 0L): FreeVar = + new FreeVar(name, value) with SynchronizedTermSymbol initFlags newFlags setInfo tpe + + override protected def makeNoSymbol = new NoSymbol with SynchronizedSymbol + + trait SynchronizedSymbol extends Symbol { + + override def rawowner = synchronized { super.rawowner } + override def rawname = synchronized { super.rawname } + override def rawflags = synchronized { super.rawflags } + + override def rawflags_=(x: FlagsType) = synchronized { super.rawflags_=(x) } + override def name_=(x: Name) = synchronized { super.name_=(x) } + override def owner_=(owner: Symbol) = synchronized { super.owner_=(owner) } + + override def validTo = synchronized { super.validTo } + override def validTo_=(x: Period) = synchronized { super.validTo_=(x) } + + override def pos = synchronized { super.pos } + override def setPos(pos: Position): this.type = { synchronized { super.setPos(pos) }; this } + + override def privateWithin = synchronized { super.privateWithin } + override def privateWithin_=(sym: Symbol) = synchronized { super.privateWithin_=(sym) } + + override def info = synchronized { super.info } + override def info_=(info: Type) = synchronized { super.info_=(info) } + override def updateInfo(info: Type): Symbol = synchronized { super.updateInfo(info) } + override def rawInfo: Type = synchronized { super.rawInfo } + + override def typeParams: List[Symbol] = synchronized { super.typeParams } + + override def reset(completer: Type) = synchronized { super.reset(completer) } + + override def infosString: String = synchronized { super.infosString } + + override def annotations: List[AnnotationInfo] = synchronized { super.annotations } + override def setAnnotations(annots: List[AnnotationInfo]): this.type = { synchronized { super.setAnnotations(annots) }; this } + + +// ------ creators ------------------------------------------------------------------- + + override def newTermSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): TermSymbol = + new TermSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags + + override def newAbstractTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AbstractTypeSymbol = + new AbstractTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags + + override def newAliasTypeSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): AliasTypeSymbol = + new AliasTypeSymbol(this, pos, name) with SynchronizedTypeSymbol initFlags newFlags + + override def newModuleSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleSymbol = + new ModuleSymbol(this, pos, name) with SynchronizedTermSymbol initFlags newFlags + + override def newMethodSymbol(name: TermName, pos: Position = NoPosition, newFlags: Long = 0L): MethodSymbol = + new MethodSymbol(this, pos, name) with SynchronizedMethodSymbol initFlags newFlags + + override def newClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ClassSymbol = + new ClassSymbol(this, pos, name) with SynchronizedClassSymbol initFlags newFlags + + override def newModuleClassSymbol(name: TypeName, pos: Position = NoPosition, newFlags: Long = 0L): ModuleClassSymbol = + new ModuleClassSymbol(this, pos, name) with SynchronizedModuleClassSymbol initFlags newFlags + + override 
def newTypeSkolemSymbol(name: TypeName, origin: AnyRef, pos: Position = NoPosition, newFlags: Long = 0L): TypeSkolem = + if ((newFlags & DEFERRED) == 0L) + new TypeSkolem(this, pos, name, origin) with SynchronizedTypeSymbol initFlags newFlags + else + new TypeSkolem(this, pos, name, origin) with AbstractTypeMixin with SynchronizedTypeSymbol initFlags newFlags + } + +// ------- subclasses --------------------------------------------------------------------- + + trait SynchronizedTermSymbol extends TermSymbol with SynchronizedSymbol { + override def referenced: Symbol = synchronized { super.referenced } + override def referenced_=(x: Symbol) = synchronized { super.referenced_=(x) } + } + + trait SynchronizedMethodSymbol extends MethodSymbol with SynchronizedTermSymbol { + override def typeAsMemberOf(pre: Type): Type = synchronized { super.typeAsMemberOf(pre) } + } + + trait SynchronizedTypeSymbol extends TypeSymbol with SynchronizedSymbol { + override def typeConstructor: Type = synchronized { super.typeConstructor } + override def tpe: Type = synchronized { super.tpe } + } + + trait SynchronizedClassSymbol extends ClassSymbol with SynchronizedTypeSymbol { + override def sourceFile = synchronized { super.sourceFile } + override def sourceFile_=(f: AbstractFileType) = synchronized { super.sourceFile_=(f) } + override def thisSym: Symbol = synchronized { super.thisSym } + override def thisType: Type = synchronized { super.thisType } + override def typeOfThis: Type = synchronized { super.typeOfThis } + override def typeOfThis_=(tp: Type) = synchronized { super.typeOfThis_=(tp) } + override def children = synchronized { super.children } + override def addChild(sym: Symbol) = synchronized { super.addChild(sym) } + } + + trait SynchronizedModuleClassSymbol extends ModuleClassSymbol with SynchronizedClassSymbol { + override def sourceModule = synchronized { super.sourceModule } + override def sourceModule_=(module: Symbol) = synchronized { super.sourceModule_=(module: Symbol) } + override def implicitMembers: List[Symbol] = synchronized { super.implicitMembers } + } +} + diff --git a/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala new file mode 100644 index 0000000000..c842d3dd01 --- /dev/null +++ b/src/compiler/scala/reflect/runtime/SynchronizedTypes.scala @@ -0,0 +1,87 @@ +package scala.reflect +package runtime + +/** This trait overrides methods in reflect.internal, bracketing + * them in synchronized { ... 
} to make them thread-safe + */ +trait SynchronizedTypes extends internal.Types { self: SymbolTable => + + // No sharing of map objects: + override protected def commonOwnerMap = new CommonOwnerMap + + private val uniqueLock = new Object + override def unique[T <: Type](tp: T): T = uniqueLock.synchronized { super.unique(tp) } + + class SynchronizedUndoLog extends UndoLog { + + override def clear() = + synchronized { super.clear() } + + override def undo[T](block: => T): T = + synchronized { super.undo(block) } + + override def undoUnless(block: => Boolean): Boolean = + synchronized { super.undoUnless(block) } + } + + override protected def newUndoLog = new SynchronizedUndoLog + + override protected def baseTypeOfNonClassTypeRef(tpe: NonClassTypeRef, clazz: Symbol) = + synchronized { super.baseTypeOfNonClassTypeRef(tpe, clazz) } + + private val subsametypeLock = new Object + + override def isSameType(tp1: Type, tp2: Type): Boolean = + subsametypeLock.synchronized { super.isSameType(tp1, tp2) } + + override def isDifferentType(tp1: Type, tp2: Type): Boolean = + subsametypeLock.synchronized { super.isDifferentType(tp1, tp2) } + + override def isSubType(tp1: Type, tp2: Type, depth: Int): Boolean = + subsametypeLock.synchronized { super.isSubType(tp1, tp2, depth) } + + private val lubglbLock = new Object + + override def glb(ts: List[Type]): Type = + lubglbLock.synchronized { super.glb(ts) } + + override def lub(ts: List[Type]): Type = + lubglbLock.synchronized { super.lub(ts) } + + private val indentLock = new Object + + override protected def explain[T](op: String, p: (Type, T) => Boolean, tp1: Type, arg2: T): Boolean = { + indentLock.synchronized { super.explain(op, p, tp1, arg2) } + } + + private val toStringLock = new Object + + override protected def typeToString(tpe: Type): String = + toStringLock.synchronized(super.typeToString(tpe)) + + /* The idea of caches is as follows. + * When in reflective mode, a cache is either null, a sentinel + * value representing undefined, or the final defined + * value. Hence, we can ask in non-synchronized mode whether the cache field + * is non-null and different from the sentinel (if a sentinel exists). + * If that's true, the cache value is current. + * Otherwise we arrive in one of the defined... methods listed below + * which go through all steps in synchronized mode.
+ */ + + override protected def defineUnderlyingOfSingleType(tpe: SingleType) = + tpe.synchronized { super.defineUnderlyingOfSingleType(tpe) } + + override protected def defineBaseTypeSeqOfCompoundType(tpe: CompoundType) = + tpe.synchronized { super.defineBaseTypeSeqOfCompoundType(tpe) } + + override protected def defineBaseClassesOfCompoundType(tpe: CompoundType) = + tpe.synchronized { super.defineBaseClassesOfCompoundType(tpe) } + + override protected def defineParentsOfTypeRef(tpe: TypeRef) = + tpe.synchronized { super.defineParentsOfTypeRef(tpe) } + + override protected def defineBaseTypeSeqOfTypeRef(tpe: TypeRef) = + tpe.synchronized { super.defineBaseTypeSeqOfTypeRef(tpe) } + +} \ No newline at end of file diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 8afd6d2231..9ab12c6a86 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -57,7 +57,7 @@ trait ToolBoxes extends { self: Universe => def wrapInObject(expr: Tree, fvs: List[Symbol]): ModuleDef = { val obj = EmptyPackageClass.newModule(nextWrapperModuleName()) - val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), new Scope, obj.moduleClass) + val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), newScope, obj.moduleClass) obj.moduleClass setInfo minfo obj setInfo obj.moduleClass.tpe val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName)) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index c8db996de2..9ec1256ca8 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -615,7 +615,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb object icodeChecker extends icodeCheckers.ICodeChecker() object typer extends analyzer.Typer( - analyzer.NoContext.make(EmptyTree, Global.this.definitions.RootClass, new Scope) + analyzer.NoContext.make(EmptyTree, Global.this.definitions.RootClass, newScope) ) /** Add the internal compiler phases to the phases set. 
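The cache discipline described in the SynchronizedTypes comment above (a field that is null, a sentinel, or the defined value, read without locking and filled in by a synchronized define step) boils down to the following self-contained sketch. It is illustrative only: the names Cached and Undefined are invented here, and the real compiler caches carry extra bookkeeping (such as validity periods) that the sketch omits.

object CachePatternSketch {
  private object Undefined // sentinel: a cache slot that exists but is not currently valid

  final class Cached[T <: AnyRef](compute: () => T) {
    @volatile private[this] var cache: AnyRef = null // null, Undefined, or the defined value

    /** Fast path: no locking when the field already holds a real, current value. */
    def get: T = {
      val c = cache
      if ((c ne null) && (c ne Undefined)) c.asInstanceOf[T]
      else define()
    }

    /** Mark the cached value as stale; the next `get` recomputes under the lock. */
    def invalidate(): Unit = { cache = Undefined }

    /** Slow path, analogous to the synchronized define methods: recheck, compute, publish. */
    private def define(): T = synchronized {
      val c = cache
      if ((c ne null) && (c ne Undefined)) c.asInstanceOf[T]
      else { val result = compute(); cache = result; result }
    }
  }

  def main(args: Array[String]): Unit = {
    val cached = new Cached[String](() => { println("computing..."); "result" })
    println(cached.get) // computes once, inside the lock
    println(cached.get) // unsynchronized fast path
  }
}

Reads that find a current value never take a lock; only the define step synchronizes, which keeps the common path cheap when the symbol table is used reflectively.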
diff --git a/src/compiler/scala/tools/nsc/matching/Patterns.scala b/src/compiler/scala/tools/nsc/matching/Patterns.scala index e5748b7c23..18409cfffe 100644 --- a/src/compiler/scala/tools/nsc/matching/Patterns.scala +++ b/src/compiler/scala/tools/nsc/matching/Patterns.scala @@ -37,7 +37,7 @@ trait Patterns extends ast.TreeDSL { // } private lazy val dummyMethod = - new TermSymbol(NoSymbol, NoPosition, newTermName("matching$dummy")) + NoSymbol.newTermSymbol(newTermName("matching$dummy")) // Fresh patterns def emptyPatterns(i: Int): List[Pattern] = List.fill(i)(NoPattern) diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala index 4205c2ff36..942ec1fa86 100644 --- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala +++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala @@ -207,7 +207,7 @@ abstract class SymbolLoaders { protected def doComplete(root: Symbol) { assert(root.isPackageClass, root) - root.setInfo(new PackageClassInfoType(new Scope(), root)) + root.setInfo(new PackageClassInfoType(newScope, root)) val sourcepaths = classpath.sourcepaths for (classRep <- classpath.classes if platform.doLoad(classRep)) { diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 099145d3ae..a61c323824 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -536,8 +536,8 @@ abstract class ClassfileParser { addEnclosingTParams(clazz) parseInnerClasses() // also sets the isScala / isScalaRaw / hasMeta flags, see r15956 // get the class file parser to reuse scopes. - instanceDefs = new Scope - staticDefs = new Scope + instanceDefs = newScope + staticDefs = newScope val classInfo = ClassInfoType(parseParents, instanceDefs, clazz) val staticInfo = ClassInfoType(List(), staticDefs, statics) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala index ead431c8d7..eb8e7a14a5 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/MetaParser.scala @@ -108,7 +108,7 @@ abstract class MetaParser{ } protected def parseClass() { - locals = new Scope + locals = newScope def parse(): Type = { nextToken() if (token == "[") { @@ -130,7 +130,7 @@ abstract class MetaParser{ protected def parseMethod() { val globals = locals - locals = if (locals eq null) new Scope else new Scope(locals) + locals = if (locals eq null) newScope else newNestedScope(locals) def parse(): Type = { nextToken(); if (token == "[") PolyType(parseTypeParams(), parse()) diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala index 6c238f52cc..e11a5a4ad9 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala @@ -165,7 +165,7 @@ abstract class TypeParser { clrTypes.sym2type(typMgdPtr) = clazzMgdPtr /* clazzMgdPtr but not clazzBoxed is mapped by clrTypes.types into an msil.Type instance, because there's no metadata-level representation for a "boxed valuetype" */ - val instanceDefsMgdPtr = new Scope + val instanceDefsMgdPtr = newScope val classInfoMgdPtr = ClassInfoType(definitions.anyvalparam, instanceDefsMgdPtr, clazzMgdPtr) clazzMgdPtr.setFlag(flags) 
clazzMgdPtr.setInfo(classInfoMgdPtr) @@ -196,8 +196,8 @@ abstract class TypeParser { } } /* END CLR generics (snippet 2) */ - instanceDefs = new Scope - staticDefs = new Scope + instanceDefs = newScope + staticDefs = newScope val classInfoAsInMetadata = { val ifaces: Array[MSILType] = typ.getInterfaces() @@ -212,7 +212,7 @@ abstract class TypeParser { } // methods, properties, events, fields are entered in a moment if (canBeTakenAddressOf) { - val instanceDefsBoxed = new Scope + val instanceDefsBoxed = newScope ClassInfoType(parents.toList, instanceDefsBoxed, clazzBoxed) } else ClassInfoType(parents.toList, instanceDefs, clazz) diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 8f5d308b8f..05b2b7a437 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -119,7 +119,7 @@ abstract class AddInterfaces extends InfoTransform { * given the decls ifaceDecls of its interface. */ private def implDecls(implClass: Symbol, ifaceDecls: Scope): Scope = { - val decls = new Scope + val decls = newScope if ((ifaceDecls lookup nme.MIXIN_CONSTRUCTOR) == NoSymbol) decls enter ( implClass.newMethod(nme.MIXIN_CONSTRUCTOR, implClass.pos) diff --git a/src/compiler/scala/tools/nsc/transform/Constructors.scala b/src/compiler/scala/tools/nsc/transform/Constructors.scala index 23817545e2..d1c71faf1e 100644 --- a/src/compiler/scala/tools/nsc/transform/Constructors.scala +++ b/src/compiler/scala/tools/nsc/transform/Constructors.scala @@ -447,7 +447,7 @@ abstract class Constructors extends Transform with ast.TreeDSL { val closureClass = clazz.newClass(nme.delayedInitArg.toTypeName, impl.pos, SYNTHETIC | FINAL) val closureParents = List(AbstractFunctionClass(0).tpe, ScalaObjectClass.tpe) - closureClass setInfoAndEnter new ClassInfoType(closureParents, new Scope, closureClass) + closureClass setInfoAndEnter new ClassInfoType(closureParents, newScope, closureClass) val outerField = ( closureClass diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index 71696c24e6..b342b95742 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -742,7 +742,7 @@ abstract class Erasure extends AddInterfaces //println("computing bridges for " + owner)//DEBUG assert(phase == currentRun.erasurePhase) val site = owner.thisType - val bridgesScope = new Scope + val bridgesScope = newScope val bridgeTarget = new mutable.HashMap[Symbol, Symbol] var bridges: List[Tree] = List() val opc = atPhase(currentRun.explicitouterPhase) { diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index b17fd7b9b0..4fa5b52de3 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -65,7 +65,7 @@ abstract class Flatten extends InfoTransform { case ClassInfoType(parents, decls, clazz) => var parents1 = parents val decls1 = scopeTransform(clazz) { - val decls1 = new Scope() + val decls1 = newScope if (clazz.isPackageClass) { atPhase(phase.next)(decls foreach (decls1 enter _)) } else { diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index bf19cf10e9..bd29336703 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ 
b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -403,12 +403,12 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { assert(clazz.sourceModule != NoSymbol || clazz.isAnonymousClass, clazz + " has no sourceModule: sym = " + sym + " sym.tpe = " + sym.tpe) parents1 = List() - decls1 = new Scope(decls.toList filter isImplementedStatically) + decls1 = newScopeWith(decls.toList filter isImplementedStatically: _*) } else if (!parents.isEmpty) { parents1 = parents.head :: (parents.tail map toInterface) } } - //decls1 = atPhase(phase.next)(new Scope(decls1.toList))//debug + //decls1 = atPhase(phase.next)(newScopeWith(decls1.toList: _*))//debug if ((parents1 eq parents) && (decls1 eq decls)) tp else ClassInfoType(parents1, decls1, clazz) @@ -480,7 +480,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { /** The rootContext used for typing */ private val rootContext = - erasure.NoContext.make(EmptyTree, RootClass, new Scope) + erasure.NoContext.make(EmptyTree, RootClass, newScope) /** The typer */ private var localTyper: erasure.Typer = _ diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 70f8d37585..1200e973c5 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -74,7 +74,7 @@ abstract class OverridingPairs { } /** The symbols that can take part in an overriding pair */ - private val decls = new Scope + private val decls = newScope // fill `decls` with overriding shadowing overridden */ { def fillDecls(bcs: List[Symbol], deferredflag: Int) { diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index fd826fb6d8..4a104857db 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -502,7 +502,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { typeEnv(sClass) = env this.specializedClass((clazz, env0)) = sClass - val decls1 = new Scope // declarations of the newly specialized class 'sClass' + val decls1 = newScope // declarations of the newly specialized class 'sClass' var oldClassTParams: List[Symbol] = Nil // original unspecialized type parameters var newClassTParams: List[Symbol] = Nil // unspecialized type parameters of 'specializedClass' (cloned) @@ -1089,7 +1089,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { if (tparams.nonEmpty) " (poly)" else "", clazz, parents1, phase) ) - val newScope = new Scope(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz)) + val newScope = newScopeWith(specializeClass(clazz, typeEnv(clazz)) ++ specialOverrides(clazz): _*) // If tparams.isEmpty, this is just the ClassInfoType. 
polyType(tparams, ClassInfoType(parents1, newScope, clazz)) case _ => diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 56d9658377..bf41ddab9b 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -259,7 +259,7 @@ abstract class UnCurry extends InfoTransform else if (isPartial) List(appliedType(AbstractPartialFunctionClass.typeConstructor, targs), SerializableClass.tpe) else List(ObjectClass.tpe, fun.tpe, SerializableClass.tpe) - anonClass setInfo ClassInfoType(parents, new Scope, anonClass) + anonClass setInfo ClassInfoType(parents, newScope, anonClass) val applyMethod = anonClass.newMethod(nme.apply, fun.pos, FINAL) applyMethod setInfoAndEnter MethodType(applyMethod newSyntheticValueParams formals, restpe) anonClass addAnnotation serialVersionUIDAnnotation diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index faff4ccab2..c2647c709a 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -218,7 +218,7 @@ trait Contexts { self: Analyzer => make(unit, tree, owner, scope, imports) def makeNewScope(tree: Tree, owner: Symbol): Context = - make(tree, owner, new Scope(scope)) + make(tree, owner, newNestedScope(scope)) // IDE stuff: distinguish between scopes created for typing and scopes created for naming. def make(tree: Tree, owner: Symbol): Context = diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 53e88b33c8..d0492c2f63 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -213,7 +213,7 @@ trait Implicits { /** An extractor for types of the form ? { name: (? >: argtpe <: Any*)restp } */ object HasMethodMatching { - val dummyMethod = new TermSymbol(NoSymbol, NoPosition, newTermName("typer$dummy")) + val dummyMethod = NoSymbol.newTermSymbol(newTermName("typer$dummy")) def templateArgType(argtpe: Type) = new BoundedWildcardType(TypeBounds.lower(argtpe)) def apply(name: Name, argtpes: List[Type], restpe: Type): Type = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 354b8caaa3..b19a471214 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -87,7 +87,7 @@ trait Namers extends MethodSynthesis { newNamer(context.makeNewScope(tree, sym)) } def createInnerNamer() = { - newNamer(context.make(context.tree, owner, new Scope)) + newNamer(context.make(context.tree, owner, newScope)) } def createPrimaryConstructorParameterNamer: Namer = { //todo: can we merge this with SCCmode? 
val classContext = context.enclClass @@ -832,7 +832,7 @@ trait Namers extends MethodSynthesis { val parents = typer.parentTypes(templ) map checkParent enterSelf(templ.self) - val decls = new Scope + val decls = newScope val templateNamer = newNamer(context.make(templ, clazz, decls)) templateNamer enterSyms templ.body diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 4104803194..b28a717049 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -1471,7 +1471,7 @@ defined class Foo */ def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1; // assert(owner ne null) // assert(owner ne NoSymbol) - new TermSymbol(NoSymbol, pos, vpmName.counted(prefix, ctr)) setInfo repackExistential(tp) + NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp) } def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match { diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 112aa47114..ee2e292bba 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -955,7 +955,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R // Forward reference checking --------------------------------------------------- class LevelInfo(val outer: LevelInfo) { - val scope: Scope = if (outer eq null) new Scope else new Scope(outer.scope) + val scope: Scope = if (outer eq null) newScope else newNestedScope(outer.scope) var maxindex: Int = Int.MinValue var refpos: Position = _ var refsym: Symbol = _ diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index b4221365be..d6248891a2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1360,7 +1360,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { assert(clazz != NoSymbol) reenterTypeParams(cdef.tparams) val tparams1 = cdef.tparams mapConserve (typedTypeDef) - val impl1 = newTyper(context.make(cdef.impl, clazz, new Scope)) + val impl1 = newTyper(context.make(cdef.impl, clazz, newScope)) .typedTemplate(cdef.impl, parentTypes(cdef.impl)) val impl2 = finishMethodSynthesis(impl1, clazz, context) if ((clazz != ClassfileAnnotationClass) && @@ -1395,7 +1395,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val typedMods = removeAnnotations(mdef.mods) assert(clazz != NoSymbol, mdef) - val typer0 = newTyper(context.make(mdef.impl, clazz, new Scope)) + val typer0 = newTyper(context.make(mdef.impl, clazz, newScope)) val impl1 = typer0.typedTemplate(mdef.impl, { parentTypes(mdef.impl) ++ ( if (linkedClass == NoSymbol || !linkedClass.isSerializable || clazz.isSerializable) Nil @@ -3983,7 +3983,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val parents1 = templ.parents mapConserve (typedType(_, mode)) if (parents1 exists (_.tpe.isError)) tree setType ErrorType else { - val decls = new Scope + val decls = newScope //Console.println("Owner: " + context.enclClass.owner + " " + context.enclClass.owner.id) val self = refinedType(parents1 map (_.tpe), context.enclClass.owner, decls, templ.pos) newTyper(context.make(templ, self.typeSymbol, 
decls)).typedRefinement(templ.body) diff --git a/src/detach/plugin/scala/tools/detach/Detach.scala b/src/detach/plugin/scala/tools/detach/Detach.scala index e9cd474b82..fee2c5a273 100644 --- a/src/detach/plugin/scala/tools/detach/Detach.scala +++ b/src/detach/plugin/scala/tools/detach/Detach.scala @@ -735,7 +735,7 @@ abstract class Detach extends PluginComponent iface.sourceFile = clazz.sourceFile iface setFlag (ABSTRACT | TRAIT | INTERFACE) // Java interface val iparents = List(ObjectClass.tpe, RemoteClass.tpe, ScalaObjectClass.tpe) - iface setInfo ClassInfoType(iparents, new Scope, iface) + iface setInfo ClassInfoType(iparents, newScope, iface) // methods must throw RemoteException iface addAnnotation remoteAnnotationInfo @@ -749,7 +749,7 @@ abstract class Detach extends PluginComponent // Variant 2: un-/exportObject //val cparents = List(ObjectClass.tpe, iface.tpe, // UnreferencedClass.tpe, ScalaObjectClass.tpe) - iclaz setInfo ClassInfoType(cparents, new Scope, iclaz) + iclaz setInfo ClassInfoType(cparents, newScope, iclaz) val proxy = (iface, iclaz, new mutable.HashMap[Symbol, Symbol]) proxies(clazz) = proxy proxy diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala index 6b480ab83d..08071660a2 100755 --- a/src/library/scala/reflect/api/StandardDefinitions.scala +++ b/src/library/scala/reflect/api/StandardDefinitions.scala @@ -12,9 +12,7 @@ trait StandardDefinitions { self: Universe => abstract class AbsDefinitions { // outer packages and their classes - // Under consideration - // def RootPackage: Symbol - + def RootPackage: Symbol def RootClass: Symbol def EmptyPackage: Symbol def EmptyPackageClass: Symbol diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala index 4b959649fd..6185a788ae 100755 --- a/src/library/scala/reflect/api/Types.scala +++ b/src/library/scala/reflect/api/Types.scala @@ -20,7 +20,7 @@ trait Types { self: Universe => /** The collection of declarations in this type */ - def allDeclarations: Iterable[Symbol] + def declarations: Iterable[Symbol] /** The member with given name, either directly declared or inherited, * an OverloadedSymbol if several exist, NoSymbol if none exist. @@ -36,7 +36,7 @@ trait Types { self: Universe => * Members appear in the linearization order of their owners. * Members with the same owner appear in reverse order of their declarations. */ - def allMembers: Iterable[Symbol] + def members: Iterable[Symbol] /** An iterable containing all non-private members of this type (directly declared or inherited) * Members appear in the linearization order of their owners. @@ -125,19 +125,23 @@ trait Types { self: Universe => /** Does this type contain a reference to given symbol? */ def contains(sym: Symbol): Boolean - } - /** This class declares methods that are visible in a `SingleType`. - */ - trait AbsSingletonType extends AbsType { + /** If this is a compound type, the list of its parent types; + * otherwise the empty list + */ + def parents: List[Type] - /** The type underlying a singleton type */ + /** If this is a singleton type, returns the type underlying it; + * otherwise returns this type itself. + */ def underlying: Type - /** Widen from singleton type to its underlying non-singleton - * base type by applying one or more `underlying` dereferences, - * identity for all other types. 
+ /** If this is a singleton type, widen it to its nearest underlying non-singleton + * base type by applying one or more `underlying` dereferences. + * If this is not a singleton type, returns this type itself. * + * Example: + * * class Outer { class C ; val x: C } * val o: Outer * <o.x.type>.widen = o.C @@ -145,19 +149,6 @@ def widen: Type } - /** This class declares methods that are visible in a `CompoundType` (i.e. - * a class/trait/object template or refined type of the form - * {{{ - * P_1 with ... with P_m { D_1; ...; D_n } - * }}} - * P_n - */ - trait AbsCompoundType extends AbsType { - - /** The list of parent types of this compound type */ - def parents: List[Type] - } - /** The type of Scala types, and also Scala type signatures. * (No difference is internally made between the two). */ @@ -293,7 +284,7 @@ trait Types { self: Universe => /** A subtype of Type representing refined types as well as `ClassInfo` signatures. */ - type CompoundType <: /*AbsCompoundType with*/ Type + type CompoundType <: Type /** The `RefinedType` type defines types of any of the forms on the left, * with their RefinedType representations to the right. diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala index b91f122a23..2b30e29bb3 100644 --- a/test/files/run/reflection-implClass.scala +++ b/test/files/run/reflection-implClass.scala @@ -8,19 +8,19 @@ object Test extends App with Outer { import scala.reflect.mirror - assert(mirror.classToSymbol(manifest[Foo].erasure).info.declaration(mirror.newTermName("bar")).info == - mirror.classToSymbol(manifest[Bar].erasure).info.declaration(mirror.newTermName("foo")).info) + assert(mirror.classToSymbol(manifest[Foo].erasure).typeSig.declaration(mirror.newTermName("bar")).typeSig == + mirror.classToSymbol(manifest[Bar].erasure).typeSig.declaration(mirror.newTermName("foo")).typeSig) val s1 = implClass(manifest[Foo].erasure) assert(s1 != mirror.NoSymbol) - assert(s1.info != mirror.NoType) - assert(s1.companionModule.info != mirror.NoType) - assert(s1.companionModule.info.declaration(mirror.newTermName("bar")) != mirror.NoSymbol) + assert(s1.typeSig != mirror.NoType) + assert(s1.companionModule.typeSig != mirror.NoType) + assert(s1.companionModule.typeSig.declaration(mirror.newTermName("bar")) != mirror.NoSymbol) val s2 = implClass(manifest[Bar].erasure) assert(s2 != mirror.NoSymbol) - assert(s2.info != mirror.NoType) - assert(s2.companionModule.info != mirror.NoType) - assert(s2.companionModule.info.declaration(mirror.newTermName("foo")) != mirror.NoSymbol) + assert(s2.typeSig != mirror.NoType) + assert(s2.companionModule.typeSig != mirror.NoType) + assert(s2.companionModule.typeSig.declaration(mirror.newTermName("foo")) != mirror.NoSymbol) def implClass(clazz: Class[_]) = { val implClass = Class.forName(clazz.getName + "$class") mirror.classToSymbol(implClass) -- cgit v1.2.3 From 4abec1f64da57268ada7126f22894d1b50ebdbd8 Mon Sep 17 00:00:00 2001 From: aleksandar Date: Wed, 25 Jan 2012 15:34:18 +0100 Subject: Fix for SI-5375. Changed CompositeThrowable to inherit Exception instead of Throwable. A few minor fixes for the jdk1.5 parallel collection tasks.
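To see what the change buys, here is an illustrative snippet in the spirit of the si5375 test added below (it is not part of the patch): because CompositeThrowable now extends Exception, a plain exception handler around a parallel operation observes aggregated failures instead of letting them fly past a `case e: Exception` clause.

import scala.collection.parallel.CompositeThrowable

object CompositeCatchExample {
  def main(args: Array[String]): Unit = {
    try {
      // Several chunks fail at once; the parallel collection wraps their exceptions.
      (1 to 1000).par.map(i => if (i % 37 == 0) sys.error("failure at " + i) else i)
    } catch {
      case CompositeThrowable(errors) => println("composite failure, " + errors.size + " causes")
      case e: Exception               => println("single failure: " + e.getMessage)
    }
  }
}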
--- .../collection/parallel/ParIterableLike.scala | 3 +- src/library/scala/collection/parallel/Tasks.scala | 57 ++++++++++++---------- .../scala/collection/parallel/package.scala | 2 +- test/files/run/si5375.check | 1 + test/files/run/si5375.scala | 19 ++++++++ 5 files changed, 55 insertions(+), 27 deletions(-) create mode 100644 test/files/run/si5375.check create mode 100644 test/files/run/si5375.scala diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 90b64c17f9..390bd72ab5 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -895,7 +895,8 @@ self: ParIterableLike[T, Repr, Sequential] => @volatile var result: R1 = null.asInstanceOf[R1] def map(r: R): R1 def leaf(prevr: Option[R1]) = { - result = map(executeAndWaitResult(inner)) + val initialResult = executeAndWaitResult(inner) + result = map(initialResult) } private[parallel] override def signalAbort() { inner.signalAbort diff --git a/src/library/scala/collection/parallel/Tasks.scala b/src/library/scala/collection/parallel/Tasks.scala index 873291fb2d..b705909cad 100644 --- a/src/library/scala/collection/parallel/Tasks.scala +++ b/src/library/scala/collection/parallel/Tasks.scala @@ -88,7 +88,7 @@ trait Tasks { if (this.throwable == null && that.throwable == null && (this.result == null || that.result == null)) { println("This: " + this + ", thr=" + this.throwable + "; merged with " + that + ", thr=" + that.throwable) } else if (this.throwable != null || that.throwable != null) { - println("merging this thr: " + this.throwable + " with " + that + ", thr=" + that.throwable) + println("merging this: " + this + " with thr: " + this.throwable + " with " + that + ", thr=" + that.throwable) } } @@ -118,7 +118,7 @@ trait Tasks { /** Try to cancel the task. * @return `true` if cancellation is successful. */ - def tryCancel: Boolean + def tryCancel(): Boolean /** If the task has been cancelled successfully, those syncing on it may * automatically be notified, depending on the implementation. 
If they * aren't, this release method should be called after processing the @@ -161,32 +161,39 @@ trait AdaptiveWorkStealingTasks extends Tasks { def split: Seq[TaskImpl[R, Tp]] - def compute() = if (body.shouldSplitFurther) internal else body.tryLeaf(None) + def compute() = if (body.shouldSplitFurther) { + internal() + release() + } else { + body.tryLeaf(None) + release() + } def internal() = { var last = spawnSubtasks() - + last.body.tryLeaf(None) + last.release() body.result = last.body.result body.throwable = last.body.throwable - + while (last.next != null) { // val lastresult = Option(last.body.result) val beforelast = last last = last.next - if (last.tryCancel) { + if (last.tryCancel()) { // println("Done with " + beforelast.body + ", next direct is " + last.body) last.body.tryLeaf(Some(body.result)) - last.release + last.release() } else { // println("Done with " + beforelast.body + ", next sync is " + last.body) - last.sync + last.sync() } // println("Merging " + body + " with " + last.body) body.tryMerge(last.body.repr) } } - + def spawnSubtasks() = { var last: TaskImpl[R, Tp] = null var head: TaskImpl[R, Tp] = this @@ -196,7 +203,7 @@ trait AdaptiveWorkStealingTasks extends Tasks { for (t <- subtasks.tail.reverse) { t.next = last last = t - t.start + t.start() } } while (head.body.shouldSplitFurther); head.next = last @@ -230,12 +237,12 @@ trait ThreadPoolTasks extends Tasks { // utb: var future: Future[_] = null @volatile var owned = false @volatile var completed = false - + def start() = synchronized { // debuglog("Starting " + body) // utb: future = executor.submit(this) executor.synchronized { - incrTasks + incrTasks() executor.submit(this) } } @@ -249,9 +256,9 @@ trait ThreadPoolTasks extends Tasks { //assert(executor.getCorePoolSize == (coresize + 1)) } } - if (!completed) this.wait + while (!completed) this.wait } - def tryCancel = synchronized { + def tryCancel() = synchronized { // utb: future.cancel(false) if (!owned) { // debuglog("Cancelling " + body) @@ -259,7 +266,7 @@ trait ThreadPoolTasks extends Tasks { true } else false } - def run = { + def run() = { // utb: compute var isOkToRun = false synchronized { @@ -270,17 +277,17 @@ trait ThreadPoolTasks extends Tasks { } if (isOkToRun) { // debuglog("Running body of " + body) - compute - release + compute() } else { // just skip // debuglog("skipping body of " + body) } } - override def release = synchronized { + override def release() = synchronized { + //println("releasing: " + this + ", body: " + this.body) completed = true executor.synchronized { - decrTasks + decrTasks() } this.notifyAll } @@ -305,10 +312,10 @@ trait ThreadPoolTasks extends Tasks { val t = newTaskImpl(task) // debuglog("-----------> Executing without wait: " + task) - t.start + t.start() () => { - t.sync + t.sync() t.body.forwardThrowable t.body.result } @@ -318,9 +325,9 @@ trait ThreadPoolTasks extends Tasks { val t = newTaskImpl(task) // debuglog("-----------> Executing with wait: " + task) - t.start - - t.sync + t.start() + + t.sync() t.body.forwardThrowable t.body.result } @@ -369,7 +376,7 @@ trait FutureThreadPoolTasks extends Tasks { def sync() = future.get def tryCancel = false def run = { - compute + compute() } } diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index addc366072..f152629c50 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -117,7 +117,7 @@ package parallel { /** Composite 
throwable - thrown when multiple exceptions are thrown at the same time. */ final case class CompositeThrowable( val throwables: Set[Throwable] - ) extends Throwable( + ) extends Exception( "Multiple exceptions thrown during a parallel computation: " + throwables.map(t => t + "\n" + t.getStackTrace.take(10).++("...").mkString("\n")).mkString("\n\n") ) diff --git a/test/files/run/si5375.check b/test/files/run/si5375.check new file mode 100644 index 0000000000..7d3002ffda --- /dev/null +++ b/test/files/run/si5375.check @@ -0,0 +1 @@ +Composite throwable \ No newline at end of file diff --git a/test/files/run/si5375.scala b/test/files/run/si5375.scala new file mode 100644 index 0000000000..e4b329deae --- /dev/null +++ b/test/files/run/si5375.scala @@ -0,0 +1,19 @@ + + + +import collection.parallel.CompositeThrowable + + + +object Test { + + def main(args: Array[String]) { + val foos = (1 to 1000) toSeq; + try { + foos.par.map(i => if (i % 37 == 0) sys.error("i div 37") else i) + } catch { + case CompositeThrowable(thr) => println("Composite throwable") + } + } + +} -- cgit v1.2.3 From 97eca9b09af84e881aef347ff32441a1037e36f6 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 25 Jan 2012 17:20:54 +0100 Subject: Making Definitions thread-safe by replacing a bunch of vars with lazy vals. --- .../scala/reflect/internal/Definitions.scala | 111 ++++++++++----------- 1 file changed, 53 insertions(+), 58 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index a733f0d1ee..1386bce59a 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -609,7 +609,7 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val ValueTypeClass: Symbol = getClass(sn.ValueType) // System.MulticastDelegate lazy val DelegateClass: Symbol = getClass(sn.Delegate) - var Delegate_scalaCallers: List[Symbol] = List() + var Delegate_scalaCallers: List[Symbol] = List() // Syncnote: No protection necessary yet as only for .NET where reflection is not supported. // Symbol -> (Symbol, Type): scalaCaller -> (scalaMethodSym, DelegateType) // var Delegate_scalaCallerInfos: HashMap[Symbol, (Symbol, Type)] = _ lazy val Delegate_scalaCallerTargets: mutable.HashMap[Symbol, Symbol] = mutable.HashMap() @@ -629,31 +629,47 @@ trait Definitions extends reflect.api.StandardDefinitions { case _ => false }) } - + // members of class scala.Any - var Any_== : Symbol = _ - var Any_!= : Symbol = _ - var Any_equals : Symbol = _ - var Any_hashCode : Symbol = _ - var Any_toString : Symbol = _ - var Any_getClass : Symbol = _ - var Any_isInstanceOf: Symbol = _ - var Any_asInstanceOf: Symbol = _ - var Any_## : Symbol = _ - - // members of class java.lang.{Object, String} - var Object_eq : Symbol = _ - var Object_ne : Symbol = _ - var Object_== : Symbol = _ - var Object_!= : Symbol = _ - var Object_## : Symbol = _ - var Object_synchronized: Symbol = _ + lazy val Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) + lazy val Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL) + lazy val Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype) + lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype) + lazy val Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype) + lazy val Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL) + + // Any_getClass requires special handling. 
The return type is determined on + // a per-call-site basis as if the function being called were actually: + // + // // Assuming `target.getClass()` + // def getClass[T](target: T): Class[_ <: T] + // + // Since getClass is not actually a polymorphic method, this requires compiler + // participation. At the "Any" level, the return type is Class[_] as it is in + // java.lang.Object. Java also special cases the return type. + lazy val Any_getClass = + newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED) + lazy val Any_isInstanceOf = newPolyMethod( + AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL + lazy val Any_asInstanceOf = newPolyMethod( + AnyClass, nme.asInstanceOf_, tparam => NullaryMethodType(tparam.typeConstructor)) setFlag FINAL + + // members of class java.lang.{ Object, String } + lazy val Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL) + lazy val Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL) + lazy val Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL) + lazy val Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL) + lazy val Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL) + lazy val Object_synchronized = newPolyMethodCon( + ObjectClass, nme.synchronized_, + tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL lazy val Object_isInstanceOf = newPolyMethod( ObjectClass, newTermName("$isInstanceOf"), tparam => MethodType(List(), booltype)) setFlag (FINAL | SYNTHETIC) lazy val Object_asInstanceOf = newPolyMethod( ObjectClass, newTermName("$asInstanceOf"), tparam => MethodType(List(), tparam.typeConstructor)) setFlag (FINAL | SYNTHETIC) + lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL) def Object_getClass = getMember(ObjectClass, nme.getClass_) def Object_clone = getMember(ObjectClass, nme.clone_) @@ -664,7 +680,6 @@ trait Definitions extends reflect.api.StandardDefinitions { def Object_hashCode = getMember(ObjectClass, nme.hashCode_) def Object_toString = getMember(ObjectClass, nme.toString_) - var String_+ : Symbol = _ // boxed classes lazy val ObjectRefClass = getRequiredClass("scala.runtime.ObjectRef") @@ -940,43 +955,7 @@ trait Definitions extends reflect.api.StandardDefinitions { RootClass.info.decls enter EmptyPackage RootClass.info.decls enter RootPackage - - // members of class scala.Any - Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) - Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL) - Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype) - Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype) - Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype) - Any_## = newMethod(AnyClass, nme.HASHHASH, Nil, inttype, FINAL) - - // Any_getClass requires special handling. The return type is determined on - // a per-call-site basis as if the function being called were actually: - // - // // Assuming `target.getClass()` - // def getClass[T](target: T): Class[_ <: T] - // - // Since getClass is not actually a polymorphic method, this requires compiler - // participation. At the "Any" level, the return type is Class[_] as it is in - // java.lang.Object. Java also special cases the return type. 
- Any_getClass = - newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED) - Any_isInstanceOf = newPolyMethod( - AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL - Any_asInstanceOf = newPolyMethod( - AnyClass, nme.asInstanceOf_, tparam => NullaryMethodType(tparam.typeConstructor)) setFlag FINAL - - // members of class java.lang.{ Object, String } - Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL) - Object_== = newMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL) - Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL) - Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL) - Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL) - Object_synchronized = newPolyMethodCon( - ObjectClass, nme.synchronized_, - tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL - - String_+ = newMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL) - + val forced = List( // force initialization of every symbol that is entered as a side effect AnnotationDefaultAttr, // #2264 RepeatedParamClass, @@ -989,8 +968,24 @@ trait Definitions extends reflect.api.StandardDefinitions { NothingClass, SingletonClass, EqualsPatternClass, + Any_==, + Any_!=, + Any_equals, + Any_hashCode, + Any_toString, + Any_getClass, + Any_isInstanceOf, + Any_asInstanceOf, + Any_##, + Object_eq, + Object_ne, + Object_==, + Object_!=, + Object_##, + Object_synchronized, Object_isInstanceOf, - Object_asInstanceOf + Object_asInstanceOf, + String_+ ) /** Removing the anyref parent they acquire from having a source file. -- cgit v1.2.3 From 469af446c7f739022313011f822bd52c1c5637fd Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 25 Jan 2012 17:22:11 +0100 Subject: Protecting the constructors of Scopes and Symbols so that everyone is forced to go through the factory method, which adds on synchronization when run under reflection. --- src/compiler/scala/reflect/internal/BaseTypeSeqs.scala | 6 +++++- src/compiler/scala/reflect/internal/Scopes.scala | 8 ++++++-- 2 files changed, 11 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala index 53e89b3d1e..9e5c93753f 100644 --- a/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala +++ b/src/compiler/scala/reflect/internal/BaseTypeSeqs.scala @@ -32,7 +32,11 @@ trait BaseTypeSeqs { protected def newBaseTypeSeq(parents: List[Type], elems: Array[Type]) = new BaseTypeSeq(parents, elems) - class BaseTypeSeq(private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { + /** Note: constructor is protected to force everyone to use the factory method newBaseTypeSeq instead. + * This is necessary because when run from reflection every base type sequence needs to have a + * SynchronizedBaseTypeSeq as mixin. 
+ */ + class BaseTypeSeq protected[BaseTypeSeqs] (private[BaseTypeSeqs] val parents: List[Type], private[BaseTypeSeqs] val elems: Array[Type]) { self => incCounter(baseTypeSeqCount) incCounter(baseTypeSeqLenTotal, elems.length) diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala index 8861386bc8..54d3de09cd 100644 --- a/src/compiler/scala/reflect/internal/Scopes.scala +++ b/src/compiler/scala/reflect/internal/Scopes.scala @@ -37,9 +37,13 @@ trait Scopes extends api.Scopes { self: SymbolTable => def unapplySeq(decls: Scope): Some[Seq[Symbol]] = Some(decls.toList) } - class Scope(initElems: ScopeEntry = null) extends Iterable[Symbol] { + /** Note: constructor is protected to force everyone to use the factory methods newScope or newNestedScope instead. + * This is necessary because when run from reflection every scope needs to have a + * SynchronizedScope as mixin. + */ + class Scope protected[Scopes] (initElems: ScopeEntry = null) extends Iterable[Symbol] { - def this(base: Scope) = { + protected[Scopes] def this(base: Scope) = { this(base.elems) nestinglevel = base.nestinglevel + 1 } -- cgit v1.2.3 From 0c0ba99ce1e1488f81b63225c5dd6878d6b836b3 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 25 Jan 2012 17:22:50 +0100 Subject: More work on making reflection thread-safe. --- .../reflect/internal/AnnotationCheckers.scala | 1 + .../scala/reflect/internal/AnnotationInfos.scala | 2 +- .../scala/reflect/internal/InfoTransformers.scala | 2 + .../scala/reflect/runtime/ConversionUtil.scala | 48 ++++++++++++---------- .../scala/reflect/runtime/JavaToScala.scala | 2 +- src/compiler/scala/reflect/runtime/Loaders.scala | 2 +- .../scala/reflect/runtime/SynchronizedOps.scala | 1 - src/library/scala/reflect/api/Trees.scala | 4 +- 8 files changed, 35 insertions(+), 27 deletions(-) diff --git a/src/compiler/scala/reflect/internal/AnnotationCheckers.scala b/src/compiler/scala/reflect/internal/AnnotationCheckers.scala index 666c1d74cb..449b0ca0bc 100644 --- a/src/compiler/scala/reflect/internal/AnnotationCheckers.scala +++ b/src/compiler/scala/reflect/internal/AnnotationCheckers.scala @@ -49,6 +49,7 @@ trait AnnotationCheckers { def adaptAnnotations(tree: Tree, mode: Int, pt: Type): Tree = tree } + // Syncnote: Annotation checkers inaccessible to reflection, so no sync in var necessary. /** The list of annotation checkers that have been registered */ private var annotationCheckers: List[AnnotationChecker] = Nil diff --git a/src/compiler/scala/reflect/internal/AnnotationInfos.scala b/src/compiler/scala/reflect/internal/AnnotationInfos.scala index 255e69c3c6..c3dde3e6d1 100644 --- a/src/compiler/scala/reflect/internal/AnnotationInfos.scala +++ b/src/compiler/scala/reflect/internal/AnnotationInfos.scala @@ -178,7 +178,7 @@ trait AnnotationInfos extends api.AnnotationInfos { self: SymbolTable => private var rawpos: Position = NoPosition def pos = rawpos - def setPos(pos: Position): this.type = { + def setPos(pos: Position): this.type = { // Syncnote: Setpos inaccessible to reflection, so no sync in rawpos necessary. 
rawpos = pos this } diff --git a/src/compiler/scala/reflect/internal/InfoTransformers.scala b/src/compiler/scala/reflect/internal/InfoTransformers.scala index 9c54b1b4cd..96d9d8f076 100644 --- a/src/compiler/scala/reflect/internal/InfoTransformers.scala +++ b/src/compiler/scala/reflect/internal/InfoTransformers.scala @@ -9,6 +9,8 @@ package internal trait InfoTransformers { self: SymbolTable => + /* Syncnote: This should not need to be protected, as reflection does not run in multiple phases. + */ abstract class InfoTransformer { var prev: InfoTransformer = this var next: InfoTransformer = this diff --git a/src/compiler/scala/reflect/runtime/ConversionUtil.scala b/src/compiler/scala/reflect/runtime/ConversionUtil.scala index bd40200310..e75fd78590 100644 --- a/src/compiler/scala/reflect/runtime/ConversionUtil.scala +++ b/src/compiler/scala/reflect/runtime/ConversionUtil.scala @@ -17,36 +17,42 @@ trait ConversionUtil { self: SymbolTable => private val toScalaMap = new HashMap[J, S] private val toJavaMap = new HashMap[S, J] - def enter(j: J, s: S) = { + def enter(j: J, s: S) = synchronized { debugInfo("cached: "+j+"/"+s) toScalaMap(j) = s toJavaMap(s) = j } - def toScala(key: J)(body: => S): S = toScalaMap get key match { - case Some(v) => - v - case none => - val result = body - enter(key, result) - result + def toScala(key: J)(body: => S): S = synchronized { + toScalaMap get key match { + case Some(v) => + v + case none => + val result = body + enter(key, result) + result + } } - def toJava(key: S)(body: => J): J = toJavaMap get key match { - case Some(v) => - v - case none => - val result = body - enter(result, key) - result + def toJava(key: S)(body: => J): J = synchronized { + toJavaMap get key match { + case Some(v) => + v + case none => + val result = body + enter(result, key) + result + } } - def toJavaOption(key: S)(body: => Option[J]): Option[J] = toJavaMap get key match { - case None => - val result = body - for (value <- result) enter(value, key) - result - case some => some + def toJavaOption(key: S)(body: => Option[J]): Option[J] = synchronized { + toJavaMap get key match { + case None => + val result = body + for (value <- result) enter(value, key) + result + case some => some + } } } diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala index 61b03a9a29..6e4b6cef30 100644 --- a/src/compiler/scala/reflect/runtime/JavaToScala.scala +++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala @@ -175,7 +175,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => load(sym) completeRest() } - def completeRest(): Unit = { + def completeRest(): Unit = self.synchronized { val tparams = clazz.rawInfo.typeParams val parents = try { diff --git a/src/compiler/scala/reflect/runtime/Loaders.scala b/src/compiler/scala/reflect/runtime/Loaders.scala index 0a5a21de1e..4b35a5b37e 100644 --- a/src/compiler/scala/reflect/runtime/Loaders.scala +++ b/src/compiler/scala/reflect/runtime/Loaders.scala @@ -99,7 +99,7 @@ trait Loaders { self: SymbolTable => class PackageScope(pkgClass: Symbol) extends Scope() with SynchronizedScope { assert(pkgClass.isType) - private var negatives = mutable.Set[Name]() + private val negatives = mutable.Set[Name]() // Syncnote: Performance only, so need not be protected. 
override def lookupEntry(name: Name): ScopeEntry = { val e = super.lookupEntry(name) if (e != null) diff --git a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala index 98694c2ddf..72adbd4004 100644 --- a/src/compiler/scala/reflect/runtime/SynchronizedOps.scala +++ b/src/compiler/scala/reflect/runtime/SynchronizedOps.scala @@ -35,7 +35,6 @@ trait SynchronizedOps extends internal.SymbolTable override def newScope = new Scope() with SynchronizedScope override def newNestedScope(outer: Scope): Scope = new Scope(outer) with SynchronizedScope -// override def newScopeWith(elems: ScopeEntry): Scope = new Scope(elems) with SynchronizedScope trait SynchronizedScope extends Scope { override def isEmpty: Boolean = synchronized { super.isEmpty } diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index 2394925657..0757de16ca 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -8,9 +8,9 @@ package api import scala.collection.mutable.ListBuffer -//import scala.tools.nsc.util.{ FreshNameCreator, HashSet, SourceFile } +// Syncnote: Trees are currently not thread-safe. -trait Trees /*extends reflect.generic.Trees*/ { self: Universe => +trait Trees { self: Universe => private[scala] var nodeCount = 0 -- cgit v1.2.3 From 3a09b7845c3b16906c9bea9b9deccc545b7a4cb7 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 25 Jan 2012 17:40:03 +0100 Subject: Tightening of constructors in Symbols, to force everyone to use the factory methods that take account of synchronization when run under reflection. --- src/compiler/scala/reflect/internal/Symbols.scala | 20 ++++++++++---------- 1 file changed, 10 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index ecd2de6f56..56c56df24d 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -72,7 +72,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** The class for all symbols */ - abstract class Symbol(initOwner: Symbol, initPos: Position, initName: Name) + abstract class Symbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: Name) extends AbsSymbolImpl with HasFlags with Annotatable[Symbol] { @@ -2064,7 +2064,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** A class for term symbols */ - class TermSymbol(initOwner: Symbol, initPos: Position, initName: TermName) + class TermSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends Symbol(initOwner, initPos, initName) { final override def isTerm = true @@ -2157,7 +2157,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** A class for module symbols */ - class ModuleSymbol(initOwner: Symbol, initPos: Position, initName: TermName) + class ModuleSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends TermSymbol(initOwner, initPos, initName) { private var flatname: TermName = null @@ -2180,7 +2180,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** A class for method symbols */ - class MethodSymbol(initOwner: Symbol, initPos: Position, initName: TermName) + class MethodSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TermName) extends TermSymbol(initOwner, initPos, initName) { private var mtpePeriod = NoPeriod private var 
mtpePre: Type = _ @@ -2206,7 +2206,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } - class AliasTypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) + class AliasTypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends TypeSymbol(initOwner, initPos, initName) { // Temporary programmatic help tracking down who might do such a thing override def setFlag(mask: Long): this.type = { @@ -2247,7 +2247,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** A class of type symbols. Alias and abstract types are direct instances * of this class. Classes are instances of a subclass. */ - abstract class TypeSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) { + abstract class TypeSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends Symbol(initOwner, initPos, initName) { privateWithin = NoSymbol private var tyconCache: Type = null private var tyconRunId = NoRunId @@ -2385,7 +2385,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * * origin.isInstanceOf[Symbol] == !hasFlag(EXISTENTIAL) */ - class TypeSkolem(initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef) + class TypeSkolem protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName, origin: AnyRef) extends TypeSymbol(initOwner, initPos, initName) { /** The skolemization level in place when the skolem was constructed */ @@ -2414,7 +2414,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** A class for class symbols */ - class ClassSymbol(initOwner: Symbol, initPos: Position, initName: TypeName) + class ClassSymbol protected[Symbols] (initOwner: Symbol, initPos: Position, initName: TypeName) extends TypeSymbol(initOwner, initPos, initName) { private[this] var flatname: TypeName = null private[this] var source: AbstractFileType = null @@ -2511,7 +2511,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * Note: Not all module classes are of this type; when unpickled, we get * plain class symbols! */ - class ModuleClassSymbol(owner: Symbol, pos: Position, name: TypeName) + class ModuleClassSymbol protected[Symbols] (owner: Symbol, pos: Position, name: TypeName) extends ClassSymbol(owner, pos, name) { private var module: Symbol = null private var implicitMembersCacheValue: List[Symbol] = List() @@ -2544,7 +2544,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } /** An object representing a missing symbol */ - class NoSymbol extends Symbol(null, NoPosition, nme.NO_NAME) { + class NoSymbol protected[Symbols]() extends Symbol(null, NoPosition, nme.NO_NAME) { synchronized { setInfo(NoType) privateWithin = this -- cgit v1.2.3 From e234978dfddf5f4871312eb7744ac3b133ad00da Mon Sep 17 00:00:00 2001 From: aleksandar Date: Wed, 25 Jan 2012 20:17:52 +0100 Subject: Refine fix for SI-5374 - make list deserialization backward-compatible. This is done by structurally serializing list nodes, but prepending a special `ListSerializationStart` symbol ahead of the list. If this symbol is not in the object input stream, the deserialization reverts to the old mode. Note there is not much to be done for list buffers - their serialization was broken before, so legacy serialized list buffers are no longer deserializable. However, their serialVersionUID was changed to reflect this, so deserializing a legacy list buffer should fail fast. 
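For illustration, a throwaway round-trip check in the spirit of the si5374 test below (roundTrip and ListSerializationCheck are invented names, not library code): with the structural format the spine is written node by node, so the ObjectOutputStream reference table preserves shared tails, while streams produced by the old element-by-element format are still read via the fallback path.

import java.io._

object ListSerializationCheck {
  def roundTrip[T <: AnyRef](obj: T): T = {
    val bytes = new ByteArrayOutputStream
    val out = new ObjectOutputStream(bytes)
    out.writeObject(obj)
    out.close()
    new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray)).readObject().asInstanceOf[T]
  }

  def main(args: Array[String]): Unit = {
    val xs = List(1, 2, 3, 4, 5)
    println(roundTrip(xs)) // List(1, 2, 3, 4, 5)

    // A suffix shared before serialization is still shared afterwards,
    // because the tail is written as an object reference, not re-streamed element by element.
    val pair = (xs, xs.drop(2))
    val copy = roundTrip(pair)
    println(copy._1.drop(2) eq copy._2) // expected: true
  }
}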
--- src/library/scala/collection/immutable/List.scala | 48 +++++++++++++++++++- .../scala/collection/mutable/ListBuffer.scala | 4 +- test/files/run/si5374.check | 5 ++- test/files/run/si5374.scala | 52 ++++++++++++++++++---- 4 files changed, 95 insertions(+), 14 deletions(-) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index e9ecc75e0f..f789de9fac 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -14,6 +14,7 @@ package immutable import generic._ import mutable.{Builder, ListBuffer} import annotation.tailrec +import java.io._ /** A class for immutable linked lists representing ordered collections * of elements of type. @@ -315,8 +316,46 @@ final case class ::[B](private var hd: B, private[scala] var tl: List[B]) extend override def head : B = hd override def tail : List[B] = tl override def isEmpty: Boolean = false - - + + private def writeObject(out: ObjectOutputStream) { + out.writeObject(ListSerializeStart) // needed to differentiate with the legacy `::` serialization + out.writeObject(this.hd) + out.writeObject(this.tl) + } + + private def readObject(in: ObjectInputStream) { + val obj = in.readObject() + if (obj == ListSerializeStart) { + this.hd = in.readObject().asInstanceOf[B] + this.tl = in.readObject().asInstanceOf[List[B]] + } else oldReadObject(in, obj) + } + + /* The oldReadObject method exists here for compatibility reasons. + * :: objects used to be serialized by serializing all the elements to + * the output stream directly, but this was broken (see SI-5374). + */ + private def oldReadObject(in: ObjectInputStream, firstObject: AnyRef) { + hd = firstObject.asInstanceOf[B] + assert(hd != ListSerializeEnd) + var current: ::[B] = this + while (true) in.readObject match { + case ListSerializeEnd => + current.tl = Nil + return + case a : Any => + val list : ::[B] = new ::(a.asInstanceOf[B], Nil) + current.tl = list + current = list + } + } + + private def oldWriteObject(out: ObjectOutputStream) { + var xs: List[B] = this + while (!xs.isEmpty) { out.writeObject(xs.head); xs = xs.tail } + out.writeObject(ListSerializeEnd) + } + } /** $factoryInfo @@ -580,6 +619,11 @@ object List extends SeqFactory[List] { } } +/** Only used for list serialization */ +@SerialVersionUID(0L - 8476791151975527571L) +private[scala] case object ListSerializeStart + /** Only used for list serialization */ @SerialVersionUID(0L - 8476791151975527571L) private[scala] case object ListSerializeEnd + diff --git a/src/library/scala/collection/mutable/ListBuffer.scala b/src/library/scala/collection/mutable/ListBuffer.scala index eb871135df..53c876ec08 100644 --- a/src/library/scala/collection/mutable/ListBuffer.scala +++ b/src/library/scala/collection/mutable/ListBuffer.scala @@ -41,7 +41,7 @@ import java.io._ * @define mayNotTerminateInf * @define willNotTerminateInf */ -@SerialVersionUID(3419063961353022661L) +@SerialVersionUID(3419063961353022662L) final class ListBuffer[A] extends AbstractBuffer[A] with Buffer[A] @@ -399,7 +399,7 @@ final class ListBuffer[A] private def copy() { var cursor = start val limit = last0.tail - clear + clear() while (cursor ne limit) { this += cursor.head cursor = cursor.tail diff --git a/test/files/run/si5374.check b/test/files/run/si5374.check index cdf0bc7e5b..6be88d77ec 100644 --- a/test/files/run/si5374.check +++ b/test/files/run/si5374.check @@ -1,3 +1,6 @@ ListBuffer(1, 2, 3, 1) ListBuffer(1, 2, 3, 1) -ListBuffer() \ No newline at end of 
file +ListBuffer() +List(1, 2, 3, 4, 5) +List(1, 2, 3) +ok \ No newline at end of file diff --git a/test/files/run/si5374.scala b/test/files/run/si5374.scala index a5678c3a81..9b1671e795 100644 --- a/test/files/run/si5374.scala +++ b/test/files/run/si5374.scala @@ -11,15 +11,22 @@ object Test { def main(args: Array[String]) { ticketExample() emptyListBuffer() + list() + legacyList() + objectWithMultipleLists() } - def ticketExample() { + def inAndOut[T <: AnyRef](obj: T): T = { val baos = new ByteArrayOutputStream val oos = new ObjectOutputStream(baos) - oos.writeObject( ListBuffer(1,2,3) ) + oos.writeObject( obj ) val bais = new ByteArrayInputStream( baos.toByteArray ) val ois = new ObjectInputStream(bais) - val lb = ois.readObject.asInstanceOf[ListBuffer[Int]] + ois.readObject.asInstanceOf[T] + } + + def ticketExample() { + val lb = inAndOut(ListBuffer(1, 2, 3)) val lb2 = ListBuffer[Int]() ++= lb lb2 ++= List(1) @@ -29,14 +36,41 @@ object Test { } def emptyListBuffer() { - val baos = new ByteArrayOutputStream - val oos = new ObjectOutputStream(baos) - oos.writeObject( ListBuffer() ) - val bais = new ByteArrayInputStream( baos.toByteArray ) - val ois = new ObjectInputStream(bais) - val lb = ois.readObject.asInstanceOf[ListBuffer[Int]] + val lb = inAndOut(ListBuffer[Int]()) println(lb) } + def list() { + val l = inAndOut(List(1, 2, 3, 4, 5)) + + println(l) + } + + // this byte array corresponds to what List(1, 2, 3) used to be serialized to prior to this fix + val listBytes = Array[Byte](-84, -19, 0, 5, 115, 114, 0, 39, 115, 99, 97, 108, 97, 46, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 46, 105, 109, 109, 117, 116, 97, 98, 108, 101, 46, 36, 99, 111, 108, 111, 110, 36, 99, 111, 108, 111, 110, -118, 92, 99, 91, -10, -40, -7, 109, 3, 0, 2, 76, 0, 43, 115, 99, 97, 108, 97, 36, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 36, 105, 109, 109, 117, 116, 97, 98, 108, 101, 36, 36, 99, 111, 108, 111, 110, 36, 99, 111, 108, 111, 110, 36, 36, 104, 100, 116, 0, 18, 76, 106, 97, 118, 97, 47, 108, 97, 110, 103, 47, 79, 98, 106, 101, 99, 116, 59, 76, 0, 2, 116, 108, 116, 0, 33, 76, 115, 99, 97, 108, 97, 47, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 47, 105, 109, 109, 117, 116, 97, 98, 108, 101, 47, 76, 105, 115, 116, 59, 120, 112, 115, 114, 0, 17, 106, 97, 118, 97, 46, 108, 97, 110, 103, 46, 73, 110, 116, 101, 103, 101, 114, 18, -30, -96, -92, -9, -127, -121, 56, 2, 0, 1, 73, 0, 5, 118, 97, 108, 117, 101, 120, 114, 0, 16, 106, 97, 118, 97, 46, 108, 97, 110, 103, 46, 78, 117, 109, 98, 101, 114, -122, -84, -107, 29, 11, -108, -32, -117, 2, 0, 0, 120, 112, 0, 0, 0, 1, 115, 113, 0, 126, 0, 4, 0, 0, 0, 2, 115, 113, 0, 126, 0, 4, 0, 0, 0, 3, 115, 114, 0, 44, 115, 99, 97, 108, 97, 46, 99, 111, 108, 108, 101, 99, 116, 105, 111, 110, 46, 105, 109, 109, 117, 116, 97, 98, 108, 101, 46, 76, 105, 115, 116, 83, 101, 114, 105, 97, 108, 105, 122, 101, 69, 110, 100, 36, -118, 92, 99, 91, -9, 83, 11, 109, 2, 0, 0, 120, 112, 120) + + def legacyList() { + val bais = new ByteArrayInputStream(listBytes) + val ois = new ObjectInputStream(bais) + val l = ois.readObject() + + println(l) + } + + class Foo extends Serializable { + val head = List(1, 2, 3) + val last = head.tail.tail + def structuralSharing: Boolean = head.tail.tail eq last + + assert(structuralSharing) + } + + def objectWithMultipleLists() { + val foo = inAndOut(new Foo) + + if (foo.structuralSharing) println("ok") + else println("no structural sharing") + } + } -- cgit v1.2.3 From c94d342b385fa510882721b8b7f2070750c60f0e Mon Sep 17 00:00:00 2001 
From: Josh Suereth Date: Wed, 25 Jan 2012 21:33:59 -0500 Subject: Added caching to binary resolution. * Duplicated binary repo cache in ~/.sbt/cache/scala/ * Resolved to cache before copying to local dir if jar is missing * Does *not* check SHA in cache currently --- tools/binary-repo-lib.sh | 27 ++++++++++++++++++++++++--- 1 file changed, 24 insertions(+), 3 deletions(-) diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index 4221e3205c..3a75593f21 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -7,6 +7,8 @@ remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstr libraryJar="$(pwd)/lib/scala-library.jar" desired_ext=".desired.sha1" push_jar="$(pwd)/tools/push.jar" +# Cache dir has .sbt in it to line up with SBT build. +cache_dir="${HOME}/.sbt/cache/scala" # Checks whether or not curl is installed and issues a warning on failure. checkCurl() { @@ -126,6 +128,25 @@ pushJarFiles() { else echo "Binary changes have been pushed. You may now submit the new *${desired_ext} files to git." fi +} + +# Pulls a single binary artifact from a remote repository. +# Argument 1 - The uri to the file that should be downloaded. +# Argument 2 - SHA of the file... +# Returns: Cache location. +pullJarFileToCache() { + local uri=$1 + local sha=$2 + local cache_loc=$cache_dir/$uri + local cdir=$(dirname $cache_loc) + if [[ ! -d $cdir ]]; then + mkdir -p $cdir + fi + # TODO - Check SHA of local cache is accurate. + if [[ ! -f $cache_loc ]]; then + curlDownload $cache_loc ${remote_urlbase}/${uri} + fi + echo "$cache_loc" } # Pulls a single binary artifact from a remote repository. @@ -139,8 +160,9 @@ pullJarFile() { local jar_name=${jar#$jar_dir/} local version=${sha1% ?$jar_name} local remote_uri=${version}/${jar#$basedir/} - echo "Downloading from ${remote_urlbase}/${remote_uri}" - curlDownload $jar ${remote_urlbase}/${remote_uri} + echo "Resolving [${remote_uri}]" + local cached_file=$(pullJarFileToCache $remote_uri $version) + cp $cached_file $jar } # Pulls binary artifacts from the remote repository. @@ -152,7 +174,6 @@ pullJarFiles() { jar=${sha%$desired_ext} local valid=$(isJarFileValid $jar) if [[ "$valid" != "OK" ]]; then - echo "Obtaining [$jar] from binary repository..." pullJarFile $jar $basedir fi done -- cgit v1.2.3 From e8a198f46eedc3f540c511297bdfcab68c005221 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 26 Jan 2012 05:29:07 -0800 Subject: Put a stake in partest-alternative. I look forward to partest-emo and partest-trance.
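For illustration only (this block is not part of either patch): the resolution flow that the binary-repo-lib.sh change above describes -- look in a local cache under ~/.sbt/cache/scala first, download into the cache only when the jar is missing, then copy from the cache into the working tree -- could be sketched in Scala roughly as below. The object and method names are hypothetical, the remote URL is a stand-in for the script's remote_urlbase, and, like the script, the sketch skips SHA validation of cached files.

import java.io.File
import java.net.URL
import java.nio.file.{Files, StandardCopyOption}

object BinaryRepoCache {
  // Stand-ins for the script's remote_urlbase and cache_dir (assumptions, not real endpoints).
  val remoteUrlBase = "http://repo.example.org/scala-sha-bootstrap"
  val cacheDir      = new File(sys.props("user.home"), ".sbt/cache/scala")

  // Download `uri` into the cache unless it is already present; return the cached file.
  def pullToCache(uri: String): File = {
    val cached = new File(cacheDir, uri)
    if (!cached.isFile) {
      cached.getParentFile.mkdirs()
      val in = new URL(remoteUrlBase + "/" + uri).openStream()
      try Files.copy(in, cached.toPath, StandardCopyOption.REPLACE_EXISTING)
      finally in.close()
    }
    cached
  }

  // Resolve a jar by filling the cache if needed and copying from it to the target location.
  def resolveJar(uri: String, target: File): Unit = {
    Files.copy(pullToCache(uri).toPath, target.toPath, StandardCopyOption.REPLACE_EXISTING)
    ()
  }
}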
--- src/partest-alternative/README | 50 -- .../scala/tools/partest/Actions.scala | 189 ----- .../scala/tools/partest/Alarms.scala | 86 -- .../scala/tools/partest/BuildContributors.scala | 102 --- .../scala/tools/partest/Categories.scala | 70 -- .../scala/tools/partest/Compilable.scala | 106 --- .../scala/tools/partest/Config.scala | 115 --- .../scala/tools/partest/Dispatcher.scala | 162 ---- .../scala/tools/partest/Entities.scala | 74 -- .../scala/tools/partest/Housekeeping.scala | 187 ----- .../scala/tools/partest/Partest.scala | 81 -- .../scala/tools/partest/PartestSpec.scala | 104 --- .../scala/tools/partest/Properties.scala | 17 - .../scala/tools/partest/Results.scala | 121 --- .../scala/tools/partest/Runner.scala | 36 - .../scala/tools/partest/Statistics.scala | 46 -- .../scala/tools/partest/Universe.scala | 96 --- .../scala/tools/partest/ant/JavaTask.scala | 57 -- .../scala/tools/partest/antlib.xml | 3 - .../tools/partest/category/AllCategories.scala | 20 - .../scala/tools/partest/category/Analysis.scala | 64 -- .../scala/tools/partest/category/Compiler.scala | 140 ---- .../scala/tools/partest/category/Runner.scala | 108 --- .../scala/tools/partest/io/ANSIWriter.scala | 58 -- .../scala/tools/partest/io/Diff.java | 873 --------------------- .../scala/tools/partest/io/DiffPrint.java | 606 -------------- .../scala/tools/partest/io/JUnitReport.scala | 38 - .../scala/tools/partest/io/Logging.scala | 137 ---- .../scala/tools/partest/package.scala | 45 -- .../scala/tools/partest/util/package.scala | 61 -- 30 files changed, 3852 deletions(-) delete mode 100644 src/partest-alternative/README delete mode 100644 src/partest-alternative/scala/tools/partest/Actions.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Alarms.scala delete mode 100644 src/partest-alternative/scala/tools/partest/BuildContributors.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Categories.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Compilable.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Config.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Dispatcher.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Entities.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Housekeeping.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Partest.scala delete mode 100644 src/partest-alternative/scala/tools/partest/PartestSpec.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Properties.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Results.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Runner.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Statistics.scala delete mode 100644 src/partest-alternative/scala/tools/partest/Universe.scala delete mode 100644 src/partest-alternative/scala/tools/partest/ant/JavaTask.scala delete mode 100644 src/partest-alternative/scala/tools/partest/antlib.xml delete mode 100644 src/partest-alternative/scala/tools/partest/category/AllCategories.scala delete mode 100644 src/partest-alternative/scala/tools/partest/category/Analysis.scala delete mode 100644 src/partest-alternative/scala/tools/partest/category/Compiler.scala delete mode 100644 src/partest-alternative/scala/tools/partest/category/Runner.scala delete mode 100644 src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala delete mode 100644 src/partest-alternative/scala/tools/partest/io/Diff.java delete 
mode 100644 src/partest-alternative/scala/tools/partest/io/DiffPrint.java delete mode 100644 src/partest-alternative/scala/tools/partest/io/JUnitReport.scala delete mode 100644 src/partest-alternative/scala/tools/partest/io/Logging.scala delete mode 100644 src/partest-alternative/scala/tools/partest/package.scala delete mode 100644 src/partest-alternative/scala/tools/partest/util/package.scala diff --git a/src/partest-alternative/README b/src/partest-alternative/README deleted file mode 100644 index c7673fe2f8..0000000000 --- a/src/partest-alternative/README +++ /dev/null @@ -1,50 +0,0 @@ -If you're looking for something to read, I suggest running ../test/partest -with no arguments, which at this moment prints this: - -Usage: partest [] [ ...] - : a path to a test designator, typically a .scala file or a directory. - Examples: files/pos/test1.scala, files/res/bug785 - - Test categories: - --all run all tests (default, unless no options given) - --pos Compile files that are expected to build - --neg Compile files that are expected to fail - --run Test JVM backend - --jvm Test JVM backend - --res Run resident compiler scenarii - --buildmanager Run Build Manager scenarii - --scalacheck Run Scalacheck tests - --script Run script files - --shootout Run shootout tests - --scalap Run scalap tests - - Test "smart" categories: - --grep run all tests with a source file containing - --failed run all tests which failed on the last run - - Specifying paths and additional flags, ~ means repository root: - --rootdir path from ~ to partest (default: test) - --builddir path from ~ to test build (default: build/pack) - --srcdir path from --rootdir to sources (default: files) - --javaopts flags to java on all runs (overrides JAVA_OPTS) - --scalacopts flags to scalac on all tests (overrides SCALAC_OPTS) - --pack alias for --builddir build/pack - --quick alias for --builddir build/quick - - Options influencing output: - --trace show the individual steps taken by each test - --show-diff show diff between log and check file - --show-log show log on failures - --dry-run do not run tests, only show their traces. 
- --terse be less verbose (almost silent except for failures) - --verbose be more verbose (additive with --trace) - --debug maximum debugging output - --ansi print output in color - - Other options: - --timeout Timeout in seconds - --cleanup delete all stale files and dirs before run - --nocleanup do not delete any logfiles or object dirs - --stats collect and print statistics about the tests - --validate examine test filesystem for inconsistencies - --version print version diff --git a/src/partest-alternative/scala/tools/partest/Actions.scala b/src/partest-alternative/scala/tools/partest/Actions.scala deleted file mode 100644 index 9a64edeadc..0000000000 --- a/src/partest-alternative/scala/tools/partest/Actions.scala +++ /dev/null @@ -1,189 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest - -import util._ -import nsc.io._ -import scala.sys.process._ - -trait Actions { - partest: Universe => - - class TestSequence(val actions: List[TestStep]) extends AbsTestSequence { - } - - implicit def createSequence(xs: List[TestStep]) = new TestSequence(xs) - - trait ExecSupport { - self: TestEntity => - - def execEnv: Map[String, String] = { - val map = assembleEnvironment() - val cwd = execCwd.toList map ("CWD" -> _.path) - - map ++ cwd - } - def execCwd = if (commandFile.isFile) Some(sourcesDir) else None - - def runExec(args: List[String]): Boolean = { - val cmd = fromArgs(args) - - if (isVerbose) { - trace("runExec: " + execEnv.mkString("ENV(", "\n", "\n)")) - execCwd foreach (x => trace("CWD(" + x + ")")) - } - - trace("runExec: " + cmd) - isDryRun || execAndLog(cmd) - } - - /** Exec a process to run a command. Assumes 0 exit value is success. - * Of necessity, also treats no available exit value as success. - */ - protected def execAndLog(cmd: String) = (cmd #> logFile.jfile !) == 0 - } - - trait ScriptableTest { - self: TestEntity => - - /** Translates a line from a .cmds file into a teststep. - */ - def customTestStep(line: String): TestStep = { - trace("customTestStep: " + line) - val (cmd, rest) = line span (x => !Character.isWhitespace(x)) - def qualify(name: String) = sourcesDir / name path - val args = toArgs(rest) map qualify - def fail: TestStep = (_: TestEntity) => error("Parse error: did not understand '%s'" format line) - - val f: TestEntity => Boolean = cmd match { - case "scalac" => _ scalac args - case "javac" => _ javac args - case "scala" => _ runScala args - case _ => fail - } - f - } - } - - trait CompilableTest extends CompileExecSupport { - self: TestEntity => - - def sourceFiles = location.walk collect { case f: File if isJavaOrScala(f) => f } toList - def allSources = sourceFiles map (_.path) - def scalaSources = sourceFiles filter isScala map (_.path) - def javaSources = sourceFiles filter isJava map (_.path) - - /** If there are mixed java and scala files, the standard compilation - * sequence is: - * - * scalac with all files - * javac with only java files - * scalac with only scala files - * - * This should be expanded to encompass other strategies so we know how - * well they're working or not working - notably, it would be very useful - * to know exactly when and how two-pass compilation fails. 
- */ - def compile() = { - trace("compile: " + sourceFiles) - - def compileJava() = javac(javaSources) - def compileScala() = scalac(scalaSources) - def compileAll() = scalac(allSources) - def compileMixed() = compileAll() && compileJava() && compileScala() - - if (scalaSources.nonEmpty && javaSources.nonEmpty) compileMixed() - else compileScala() - } - } - - trait DiffableTest { - self: TestEntity => - - def checkFile: File = withExtension("check").toFile - def checkFileRequired = - returning(checkFile.isFile)(res => if (!res) warnAndLog("A checkFile at '%s' is mandatory.\n" format checkFile.path)) - - lazy val sourceFileNames = sourceFiles map (_.name) - - /** Given the difficulty of verifying that any selective approach works - * everywhere, the algorithm now is to look for the name of any known - * source file for this test, and if seen, remove all the non-whitespace - * preceding it. (Paths with whitespace don't work anyway.) This should - * wipe out all slashes, backslashes, C:\, cygwin/windows differences, - * and whatever else makes a simple diff not simple. - * - * The log and check file are both transformed, which I don't think is - * correct -- only the log should be -- but doing it this way until I - * can clarify martin's comments in #3283. - */ - def normalizePaths(s: String) = - sourceFileNames.foldLeft(s)((res, name) => res.replaceAll("""\S+\Q%s\E""" format name, name)) - - /** The default cleanup normalizes paths relative to sourcesDir, - * absorbs line terminator differences by going to lines and back, - * and trims leading or trailing whitespace. - */ - def diffCleanup(f: File) = safeLines(f) map normalizePaths mkString "\n" trim - - /** diffFiles requires actual Files as arguments but the output we want - * is the post-processed versions of log/check, so we resort to tempfiles. - */ - lazy val diffOutput = { - if (!checkFile.exists) "" else { - val input = diffCleanup(checkFile) - val output = diffCleanup(logFile) - def asFile(s: String) = returning(File.makeTemp("partest-diff"))(_ writeAll s) - - if (input == output) "" - else diffFiles(asFile(input), asFile(output)) - } - } - private def checkTraceName = tracePath(checkFile) - private def logTraceName = tracePath(logFile) - private def isDiffConfirmed = checkFile.exists && (diffOutput == "") - - private def sendTraceMsg() { - def result = - if (isDryRun) "" - else if (isDiffConfirmed) " [passed]" - else if (checkFile.exists) " [failed]" - else " [unchecked]" - - trace("diff %s %s%s".format(checkTraceName, logTraceName, result)) - } - - /** If optional is true, a missing check file is considered - * a successful diff. Necessary since many categories use - * checkfiles in an ad hoc manner. - */ - def runDiff() = { - sendTraceMsg() - - def updateCheck = ( - isUpdateCheck && { - val formatStr = "** diff %s %s: " + ( - if (checkFile.exists) "failed, updating '%s' and marking as passed." - else if (diffOutput == "") "not creating checkFile at '%s' as there is no output." - else "was unchecked, creating '%s' for future tests." 
- ) + "\n" - - normal(formatStr.format(checkTraceName, logTraceName, checkFile.path)) - if (diffOutput != "") normal(diffOutput) - - checkFile.writeAll(diffCleanup(logFile), "\n") - true - } - ) - - isDryRun || isDiffConfirmed || (updateCheck || !checkFile.exists) - } - } -} diff --git a/src/partest-alternative/scala/tools/partest/Alarms.scala b/src/partest-alternative/scala/tools/partest/Alarms.scala deleted file mode 100644 index ef30d13705..0000000000 --- a/src/partest-alternative/scala/tools/partest/Alarms.scala +++ /dev/null @@ -1,86 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools -package partest - -import java.util.{ Timer, TimerTask } - -trait Alarms { - self: Universe => - - def interruptMeIn[T](debugMsg: String, seconds: Int)(body: => T): Option[T] = { - val thisThread = currentThread - val alarm = new SimpleAlarm(seconds * 1000) set thisThread.interrupt() - debug("interruptMeIn(%d) '%s'".format(seconds, debugMsg)) - - try { Some(body) } - catch { case _: InterruptedException => debug("Received interrupted exception.") ; None } - finally { debug("Cancelling interruptMeIn '%s'" format debugMsg) ; alarm.cancel() ; Thread.interrupted() } - } - - case class AlarmerAction(secs: Int, action: () => Unit) extends Runnable { - override def run() = action() - } - - /** Set any number of alarms up with tuples of the form: - * seconds to alarm -> Function0[Unit] to execute - */ - class Alarmer(alarms: AlarmerAction*) { - import java.util.concurrent._ - - val exec = Executors.newSingleThreadScheduledExecutor() - alarms foreach (x => exec.schedule(x, x.secs, TimeUnit.SECONDS)) - exec.shutdown() - - def cancelAll() = exec.shutdownNow() - } - - class SimpleAlarm(timeout: Long) { - private val alarm = new Timer - - /** Start a timer, running the given body if it goes off. - */ - def set(body: => Unit) = returning(new TimerTask { def run() = body })(alarm.schedule(_, timeout)) - - /** Cancel the timer. - */ - def cancel() = alarm.cancel() - } - - trait TestAlarms { - test: TestEntity => - - private def warning1 = AlarmerAction(testWarning, () => warning( - """|I've been waiting %s seconds for this to complete: - | %s - |It may be stuck, or if not, it should be broken into smaller tests. - |""".stripMargin.format(testWarning, test)) - ) - private def warning2 = AlarmerAction(testWarning * 2, () => warning( - """|Now I've been waiting %s seconds for this to complete: - | %s - |If partest seems hung it would be a good place to look. 
- |""".stripMargin.format(testWarning * 2, test)) - ) - - def startAlarms(onTimeout: => Unit) = - if (isNoAlarms) new Alarmer() // for alarm debugging - else new Alarmer(Seq(warning1, warning2, AlarmerAction(testTimeout, () => onTimeout)): _*) - } - - // Thread.setDefaultUncaughtExceptionHandler(new UncaughtException) - // class UncaughtException extends Thread.UncaughtExceptionHandler { - // def uncaughtException(t: Thread, e: Throwable) { - // Console.println("Uncaught in %s: %s".format(t, e)) - // } - // } - // - // lazy val logger = File("/tmp/partest.log").bufferedWriter() - // def flog(msg: String) = logger synchronized { - // logger write (msg + "\n") - // logger.flush() - // } -} diff --git a/src/partest-alternative/scala/tools/partest/BuildContributors.scala b/src/partest-alternative/scala/tools/partest/BuildContributors.scala deleted file mode 100644 index 85ca895103..0000000000 --- a/src/partest-alternative/scala/tools/partest/BuildContributors.scala +++ /dev/null @@ -1,102 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import nsc.io._ -import nsc.util.ClassPath - -trait BuildContributors { - universe: Universe => - - /** A trait mixed into types which contribute a portion of the values. - * The basic mechanism is the TestBuild, TestCategory, and TestEntity - * can each contribute to each value. They are assembled at the last - * moment by the ContributorAssembler (presently the TestEntity.) - */ - trait BuildContributor { - def javaFlags: List[String] - def scalacFlags: List[String] - def classpathPaths: List[Path] - def buildProperties: List[(String, Any)] - def buildEnvironment: Map[String, String] - } - - trait ContributorAssembler { - def contributors: List[BuildContributor] - def assemble[T](what: BuildContributor => List[T]): List[T] = contributors flatMap what - - /** !!! This will need work if we want to achieve real composability, - * but it can wait for the demand. - */ - def assembleScalacArgs(args: List[String]) = assemble(_.scalacFlags) ++ args - def assembleJavaArgs(args: List[String]) = assemble(_.javaFlags) ++ args - def assembleProperties() = assemble(_.buildProperties) - def assembleClasspaths(paths: List[Path]) = assemble(_.classpathPaths) ++ paths - def assembleEnvironment() = assemble(_.buildEnvironment.toList).toMap - - def createClasspathString() = ClassPath fromPaths (assembleClasspaths(Nil) : _*) - def createPropertyString() = assembleProperties() map { case (k, v) => "-D%s=%s".format(k, v.toString) } - } - - trait BuildContribution extends BuildContributor { - self: TestBuild => - - /** The base classpath and system properties. - * !!! TODO - this should adjust itself depending on the build - * being tested, because pack and quick at least need different jars. - */ - def classpathPaths = List[Path](library, compiler, partest, fjbg) ++ forkJoinPath - def buildProperties = List( - "scala.home" -> testBuildDir, - "partest.lib" -> library, // used in jvm/inner - "java.awt.headless" -> true, - "user.language" -> "en", - "user.country" -> "US", - "partest.debug" -> isDebug, - "partest.verbose" -> isVerbose - // Disabled because there are no natives tests. 
- // "java.library.path" -> srcLibDir - ) - def javaFlags: List[String] = toArgs(javaOpts) - def scalacFlags: List[String] = toArgs(scalacOpts) - - /** We put the build being tested's /bin directory in the front of the - * path so the scripts and such written to execute "scala" will use this - * build and not whatever happens to be on their path. - */ - private def modifiedPath = ClassPath.join(scalaBin.path, Properties.envOrElse("PATH", "")) - def buildEnvironment = Map("PATH" -> modifiedPath) - } - - trait CategoryContribution extends BuildContributor { - self: DirBasedCategory => - - /** Category-wide classpath additions placed in /lib. */ - private def libContents = root / "lib" ifDirectory (_.list.toList) - - def classpathPaths = libContents getOrElse Nil - def buildProperties = Nil - def javaFlags = Nil - def scalacFlags = Nil - def buildEnvironment = Map() - } - - trait TestContribution extends BuildContributor with ContributorAssembler { - self: TestEntity => - - def jarsInTestDir = location.walk collect { case f: File if f hasExtension "jar" => f } toList - - def contributors = List(build, category, self) - def javaFlags = safeArgs(javaOptsFile) - def scalacFlags = safeArgs(scalaOptsFile) - def classpathPaths = jarsInTestDir :+ outDir - def buildProperties = List( - "partest.output" -> outDir.toAbsolute, // used in jvm/inner - "partest.cwd" -> outDir.parent.toAbsolute // used in shootout tests - ) - def buildEnvironment = Map("JAVA_OPTS" -> fromArgs(assembleJavaArgs(Nil))) - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/Categories.scala b/src/partest-alternative/scala/tools/partest/Categories.scala deleted file mode 100644 index c517a3f931..0000000000 --- a/src/partest-alternative/scala/tools/partest/Categories.scala +++ /dev/null @@ -1,70 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest - -import nsc.Settings -import nsc.io._ -import nsc.util.{ ClassPath } - -trait Categories { - self: Universe => - - trait TestCategory extends AbsTestCategory { - def kind: String - def startMessage: String = "Executing test group" - def testSequence: TestSequence - - class TestSettings(entity: TestEntity, error: String => Unit) extends Settings(error) { - def this(entity: TestEntity) = this(entity, Console println _) - - deprecation.value = false - encoding.value = "ISO-8859-1" - classpath.value = entity.testClasspath - outdir.value = entity.outDir.path - } - - def createSettings(entity: TestEntity): TestSettings = new TestSettings(entity) - def createTest(location: Path): TestEntity = - if (location.isFile) TestFile(this, location.toFile) - else if (location.isDirectory) TestDirectory(this, location.toDirectory) - else error("Failed to create test at '%s'" format location) - - /** Category test identification. - */ - def denotesTestFile(p: Path) = p.isFile && (p hasExtension "scala") - def denotesTestDir(p: Path) = p.isDirectory && !ignorePath(p) - def denotesTest(p: Path) = denotesTestDir(p) || denotesTestFile(p) - - /** This should verify that all necessary files are present. - * By default it delegates to denotesTest. 
- */ - def denotesValidTest(p: Path) = denotesTest(p) - } - - abstract class DirBasedCategory(val kind: String) extends TestCategory with CategoryContribution { - lazy val root = Directory(src / kind).normalize - def enumerate = root.list filter denotesTest map createTest toList - - /** Standard actions. These can be overridden either on the - * Category level or by individual tests. - */ - def compile: TestStep = (_: TestEntity).compile() - def checkFileRequired: TestStep = (_: TestEntity).checkFileRequired - def diff: TestStep = (_: TestEntity).diff() - def run: TestStep = (_: TestEntity).run() - def exec: TestStep = (_: TestEntity).exec() - - /** Combinators. - */ - def not(f: TestStep): TestStep = !f(_: TestEntity) - - override def toString = kind - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/Compilable.scala b/src/partest-alternative/scala/tools/partest/Compilable.scala deleted file mode 100644 index 65b5d5da0e..0000000000 --- a/src/partest-alternative/scala/tools/partest/Compilable.scala +++ /dev/null @@ -1,106 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import scala.tools.nsc.io._ -import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError } -import scala.tools.nsc.util.{ ClassPath } -import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter } - -trait PartestCompilation { - self: Universe => - - trait CompileExecSupport extends ExecSupport { - self: TestEntity => - - def javacpArg = "-classpath " + testClasspath - def scalacpArg = "-usejavacp" - - /** Not used, requires tools.jar. - */ - // def javacInternal(args: List[String]) = { - // import com.sun.tools.javac.Main - // Main.compile(args.toArray, logWriter) - // } - - def javac(args: List[String]): Boolean = { - val allArgString = fromArgs(javacpArg :: javacOpts :: args) - - // javac -d outdir -classpath - val cmd = "%s -d %s %s".format(javacCmd, outDir, allArgString) - def traceMsg = - if (isVerbose) cmd - else "%s -d %s %s".format(tracePath(Path(javacCmd)), tracePath(outDir), fromArgs(args)) - - trace(traceMsg) - - isDryRun || execAndLog(cmd) - } - - def scalac(args: List[String]): Boolean = { - val allArgs = assembleScalacArgs(args) - val (global, files) = newGlobal(allArgs) - def nonFileArgs = if (isVerbose) global.settings.recreateArgs else assembleScalacArgs(Nil) - def traceArgs = fromArgs(nonFileArgs ++ (files map tracePath)) - def traceMsg = "scalac " + traceArgs - - trace(traceMsg) - isDryRun || global.partestCompile(files, true) - } - - /** Actually running the test, post compilation. - * Normally args will be List("Test", "jvm"), main class and arg to it. 
- */ - def runScala(args: List[String]): Boolean = { - val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner" - - // java $JAVA_OPTS -classpath - val javaCmdAndOptions = javaCmd +: assembleJavaArgs(List(javacpArg)) - // MainGenericRunner -usejavacp Test jvm - val scalaCmdAndOptions = List(scalaRunnerClass, scalacpArg) ++ assembleScalacArgs(args) - // Assembled - val cmd = fromArgs(javaCmdAndOptions ++ createPropertyString() ++ scalaCmdAndOptions) - - def traceMsg = if (isVerbose) cmd else fromArgs(javaCmd :: args) - trace("runScala: " + traceMsg) - - isDryRun || execAndLog(cmd) - } - - def newReporter(settings: Settings) = new ConsoleReporter(settings, Console.in, logWriter) - - class PartestGlobal(settings: Settings, val creporter: ConsoleReporter) extends Global(settings, creporter) { - def partestCompile(files: List[String], printSummary: Boolean): Boolean = { - try { new Run compile files } - catch { - case FatalError(msg) => creporter.error(null, "fatal error: " + msg) - case ae: AssertionError => creporter.error(null, ""+ae) - case te: TypeError => creporter.error(null, ""+te) - case ex => - creporter.error(null, ""+ex) - throw ex - } - - if (printSummary) - creporter.printSummary - - creporter.flush() - !creporter.hasErrors - } - } - - def newGlobal(args: List[String]): (PartestGlobal, List[String]) = { - val settings = category createSettings self - val command = new CompilerCommand(args, settings) - val reporter = newReporter(settings) - - if (!command.ok) - debug("Error parsing arguments: '%s'".format(args mkString ", ")) - - (new PartestGlobal(command.settings, reporter), command.files) - } - } -} diff --git a/src/partest-alternative/scala/tools/partest/Config.scala b/src/partest-alternative/scala/tools/partest/Config.scala deleted file mode 100644 index ee1852f6ed..0000000000 --- a/src/partest-alternative/scala/tools/partest/Config.scala +++ /dev/null @@ -1,115 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import io._ -import nsc.io._ -import Properties._ - -trait Config { - universe: Universe => - - lazy val src = absolutize(srcDir).toDirectory - lazy val build = new TestBuild() - - def javaHomeEnv = envOrElse("JAVA_HOME", null) - def javaCmd = envOrElse("JAVACMD", "java") - def javacCmd = Option(javaHomeEnv) map (x => Path(x) / "bin" / "javac" path) getOrElse "javac" - - /** Values related to actors. The timeouts are in seconds. On a dry - * run we only allocate one worker so the output isn't interspersed. - */ - def workerTimeout = 3600 // 1 hour, probably overly generous - def numWorkers = if (isDryRun) 1 else propOrElse("partest.actors", "8").toInt - def expectedErrors = propOrElse("partest.errors", "0").toInt - def poolSize = (wrapAccessControl(propOrNone("actors.corePoolSize")) getOrElse "16").toInt - - def allScalaFiles = src.deepFiles filter (_ hasExtension "scala") - def allObjDirs = src.deepDirs filter (_ hasExtension "obj") - def allLogFiles = src.deepFiles filter (_ hasExtension "log") - def allClassFiles = src.deepFiles filter (_ hasExtension "class") - - class TestBuild() extends BuildContribution { - import nsc.util.ClassPath - - /** Scala core libs. - */ - val library = pathForComponent("library") - val compiler = pathForComponent("compiler") - val partest = pathForComponent("partest") - val scalap = pathForComponent("scalap", "%s.jar") - - /** Scala supplementary libs - these are not all needed for all build targets, - * and some of them are copied inside other jars in later targets. 
However quick - * for instance cannot be run without some of these. - */ - val fjbg = pathForLibrary("fjbg") - val msil = pathForLibrary("msil") - val forkjoin = pathForLibrary("forkjoin") - val scalacheck = pathForLibrary("scalacheck") - - /** Other interesting paths. - */ - val scalaBin = testBuildDir / "bin" - - /** A hack for now to get quick running. - */ - def needsForkJoin = { - val loader = nsc.util.ScalaClassLoader.fromURLs(List(library.toURL)) - val fjMarker = "scala.concurrent.forkjoin.ForkJoinTask" - val clazz = loader.tryToLoadClass(fjMarker) - - if (clazz.isDefined) debug("Loaded ForkJoinTask OK, don't need jar.") - else debug("Could not load ForkJoinTask, putting jar on classpath.") - - clazz.isEmpty - } - lazy val forkJoinPath: List[Path] = if (needsForkJoin) List(forkjoin) else Nil - - /** Internal **/ - private def repo = partestDir.parent.normalize - - private def pathForComponent(what: String, jarFormat: String = "scala-%s.jar"): Path = { - def asDir = testBuildDir / "classes" / what - def asJar = testBuildDir / "lib" / jarFormat.format(what) - - if (asDir.isDirectory) asDir - else if (asJar.isFile) asJar - else "" - } - private def pathForLibrary(what: String) = File(repo / "lib" / (what + ".jar")) - } - - def printConfigBanner() = { - debug("Java VM started with arguments: '%s'" format fromArgs(Process.javaVmArguments)) - debug("System Properties:\n" + util.allPropertiesString()) - - normal(configBanner()) - } - - /** Treat an access control failure as None. */ - private def wrapAccessControl[T](body: => Option[T]): Option[T] = - try body catch { case _: java.security.AccessControlException => None } - - private def configBanner() = { - val javaBin = Path(javaHome) / "bin" - val javaInfoString = "%s (build %s, %s)".format(javaVmName, javaVmVersion, javaVmInfo) - - List( - "Scala compiler classes in: " + testBuildDir, - "Scala version is: " + nsc.Properties.versionMsg, - "Scalac options are: " + universe.scalacOpts, - "Java binaries in: " + javaBin, - "Java runtime is: " + javaInfoString, - "Java runtime options: " + (Process.javaVmArguments mkString " "), - "Javac options are: " + universe.javacOpts, - "Java options are: " + universe.javaOpts, - "Source directory is: " + src, - "Selected categories: " + (selectedCategories mkString " "), - "" - ) mkString "\n" - } -} diff --git a/src/partest-alternative/scala/tools/partest/Dispatcher.scala b/src/partest-alternative/scala/tools/partest/Dispatcher.scala deleted file mode 100644 index 69efc353eb..0000000000 --- a/src/partest-alternative/scala/tools/partest/Dispatcher.scala +++ /dev/null @@ -1,162 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Philipp Haller - */ - -package scala.tools -package partest - -import scala.tools.nsc.io._ -import scala.actors.{ Actor, TIMEOUT } -import scala.actors.Actor._ -import scala.collection.immutable -import scala.util.control.Exception.ultimately - -/** The machinery for concurrent execution of tests. Each Worker - * is given a bundle of tests, which it runs sequentially and then - * sends a report back to the dispatcher. - */ -trait Dispatcher { - partest: Universe => - - /** The public entry point. The given filter narrows down the list of - * tests to run. - */ - def runSelection(categories: List[TestCategory], filt: TestEntity => Boolean = _ => true): CombinedTestResults = { - // Setting scala.home informs tests where to obtain their jars. 
- setProp("scala.home", testBuildDir.path) - - val allTests = allCategories flatMap (_.enumerate) - val selected = allTests filter filt - val groups = selected groupBy (_.category) - val count = selected.size - - if (count == 0) return CombinedTestResults(0, 0, 0, Nil) - else if (count == allTests.size) verbose("Running all %d tests." format count) - else verbose("Running %d/%d tests: %s".format(count, allTests.size, toStringTrunc(selected map (_.label) mkString ", "))) - - allCategories collect { case x if groups contains x => runCategory(x, groups(x)) } reduceLeft (_ ++ _) - } - - private def parallelizeTests(tests: List[TestEntity]): immutable.Map[TestEntity, TestResult] = { - // propagate verbosity - if (isDebug) scala.actors.Debug.level = 3 - - // "If elected, I guarantee a slice of tests for every worker!" - val groups = tests grouped ((tests.size / numWorkers) + 1) toList - - // "Workers, line up for assignments!" - val workers = - for ((slice, workerNum) <- groups.zipWithIndex) yield { - returning(new Worker(workerNum)) { worker => - worker.start() - worker ! TestsToRun(slice) - } - } - - normal("Started %d workers with ~%d tests each.\n".format(groups.size, groups.head.size)) - - /** Listening for news from the proletariat. - */ - (workers map { w => - receiveWithin(workerTimeout * 1000) { - case ResultsOfRun(resultMap) => resultMap - case TIMEOUT => - warning("Worker %d timed out." format w.workerNum) - // mark all the worker's tests as having timed out - should be hard to miss - // immutable.Map[TestEntity, TestResult]() - groups(w.workerNum) map (x => (x -> new Timeout(x))) toMap - } - }) reduceLeft (_ ++ _) - } - - private def runCategory(category: TestCategory, tests: List[TestEntity]): CombinedTestResults = { - val kind = category.kind - normal("%s (%s tests in %s)\n".format(category.startMessage, tests.size, category)) - - val (milliSeconds, resultMap) = timed2(parallelizeTests(tests)) - val (passed, failed) = resultsToStatistics(resultMap mapValues (_.state)) - val failures = resultMap.values filterNot (_.passed) toList - - CombinedTestResults(passed, failed, milliSeconds, failures) - } - - /** A Worker is given a bundle of tests and runs them all sequentially. - */ - class Worker(val workerNum: Int) extends Actor { - def act() { - react { case TestsToRun(tests) => - val master = sender - runTests(tests)(results => master ! ResultsOfRun(results)) - } - } - - /** Runs the tests. Passes the result Map to onCompletion when done. 
- */ - private def runTests(tests: List[TestEntity])(onCompletion: immutable.Map[TestEntity, TestResult] => Unit) { - var results = new immutable.HashMap[TestEntity, TestResult] // maps tests to results - val numberOfTests = tests.size - val testIterator = tests.iterator - def processed = results.size - def isComplete = testIterator.isEmpty - - def atThreshold(num: Double) = { - require(num >= 0 && num <= 1.0) - ((processed - 1).toDouble / numberOfTests <= num) && (processed.toDouble / numberOfTests >= num) - } - - def extraMessage = { - // for now quiet for normal people - if (isVerbose || isTrace || isDebug) { - if (isComplete) "(#%d 100%%)" format workerNum - else if (isVerbose) "(#%d %d/%d)".format(workerNum, processed, numberOfTests) - else if (isTrace && atThreshold(0.5)) "(#%d 50%%)" format workerNum - else "" - } - else "" - } - - def countAndReport(result: TestResult) { - val TestResult(test, state) = result - // refuse to count an entity twice - if (results contains test) - return warning("Received duplicate result for %s: was %s, now %s".format(test, results(test), state)) - - // increment the counter for this result state - results += (test -> result) - - // show on screen - if (isDryRun) normal("\n") // blank line between dry run traces - else result show extraMessage - - // remove log if successful - if (result.passed) - test.deleteLog() - - // Respond to master if this Worker is complete - if (isComplete) - onCompletion(results) - } - - Actor.loopWhile(testIterator.hasNext) { - val parent = self - // pick a test and set some alarms - val test = testIterator.next - val alarmer = test startAlarms (parent ! new Timeout(test)) - - actor { - ultimately(alarmer.cancelAll()) { - // Calling isSuccess forces the lazy val "process" inside the test, running it. - val res = test.isSuccess - // Cancel the alarms and alert the media. - parent ! TestResult(test, res) - } - } - - react { - case x: TestResult => countAndReport(x) - } - } - } - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/Entities.scala b/src/partest-alternative/scala/tools/partest/Entities.scala deleted file mode 100644 index 301deb972b..0000000000 --- a/src/partest-alternative/scala/tools/partest/Entities.scala +++ /dev/null @@ -1,74 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Philipp Haller - */ - -package scala.tools -package partest - -import nsc.io._ - -trait Entities { - self: Universe => - - abstract class TestEntity extends AbsTestEntity - with TestContribution - with TestHousekeeping - with TestAlarms - with EntityLogging - with CompilableTest - with ScriptableTest - with DiffableTest { - def location: Path - def category: TestCategory - - lazy val label = location.stripExtension - lazy val testClasspath = returning(createClasspathString())(x => vtrace("testClasspath: " + x)) - - /** Was this test successful? Calling this for the first time forces - * lazy val "process" which actually runs the test. - */ - def isSuccess = process - - /** Some standard files, which may or may not be present. - */ - def scalaOptsFile = withExtension("flags").toFile // opts to scalac - def javaOptsFile = withExtension("javaopts").toFile // opts to java (but not javac) - def commandFile = withExtension("cmds").toFile // sequence of commands to execute - def logFile = withExtension("log").toFile // collected output - - /** Some standard directories. - */ - def outDir = withExtension("obj").toDirectory // output dir, e.g. 
files/pos/t14.obj - def categoryDir = location.parent.normalize // category dir, e.g. files/pos/ - def sourcesDir = location ifDirectory (_.normalize) getOrElse categoryDir - - /** Standard arguments for run, exec, diff. - */ - def argumentsToRun = List("Test", "jvm") - def argumentsToExec = List(location.path) - - /** Using a .cmds file for a custom test sequence. - */ - def commandList = safeLines(commandFile) - def testSequence = - if (commandFile.isFile && commandList.nonEmpty) commandList map customTestStep - else category.testSequence - - def run() = runScala(argumentsToRun) - def exec() = runExec(argumentsToExec) - def diff() = runDiff() // checkFile, logFile - - /** The memoized result of the test run. - */ - private lazy val process = { - val outcome = runWrappers(testSequence.actions forall (f => f(this))) - - // an empty outcome means we've been interrupted and are shutting down. - outcome getOrElse false - } - } - - case class TestDirectory(category: TestCategory, location: Directory) extends TestEntity { } - case class TestFile(category: TestCategory, location: File) extends TestEntity { } -} diff --git a/src/partest-alternative/scala/tools/partest/Housekeeping.scala b/src/partest-alternative/scala/tools/partest/Housekeeping.scala deleted file mode 100644 index cfdecee9c7..0000000000 --- a/src/partest-alternative/scala/tools/partest/Housekeeping.scala +++ /dev/null @@ -1,187 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import scala.util.control.Exception.catching -import util._ -import nsc.io._ -import Process.runtime -import Properties._ - -/** An agglomeration of code which is low on thrills. Hopefully - * it operates so quietly in the background that you never have to - * look at this file. - */ -trait Housekeeping { - self: Universe => - - /** Orderly shutdown on ctrl-C. */ - @volatile private var _shuttingDown = false - protected def setShuttingDown() = { - /** Whatever we want to do as shutdown begins goes here. */ - if (!_shuttingDown) { - warning("Received shutdown signal, partest is cleaning up...\n") - _shuttingDown = true - } - } - def isShuttingDown = _shuttingDown - - /** Execute some code with a shutdown hook in place. This is - * motivated by the desire not to leave the filesystem full of - * junk when someone ctrl-Cs a test run. - */ - def withShutdownHook[T](hook: => Unit)(body: => T): Option[T] = - /** Java doesn't like it if you keep adding and removing shutdown - * hooks after shutdown has begun, so we trap the failure. - */ - catching(classOf[IllegalStateException]) opt { - val t = new Thread() { - override def run() = { - setShuttingDown() - hook - } - } - runtime addShutdownHook t - - try body - finally runtime removeShutdownHook t - } - - /** Search for a directory, possibly given only a name, by starting - * at the current dir and walking upward looking for it at each level. - */ - protected def searchForDir(name: String): Directory = { - val result = Path(name) ifDirectory (x => x.normalize) orElse { - val cwd = Directory.Current getOrElse error("user.dir property not set") - val dirs = cwd :: cwd.parents map (_ / name) - - Path onlyDirs dirs map (_.normalize) headOption - } - - result getOrElse error("Fatal: could not find directory '%s'" format name) - } - - /** Paths we ignore for most purposes. 
- */ - def ignorePath(x: Path) = { - (x.name startsWith ".") || - (x.isDirectory && ((x.name == "lib") || x.hasExtension("obj", "svn"))) - } - /** Make a possibly relative path absolute using partestDir as the base. - */ - def absolutize(path: String) = Path(path) toAbsoluteWithRoot partestDir - - /** Go on a deleting binge. - */ - def cleanupAll() { - if (isNoCleanup) - return - - val (dirCount, fileCount) = (cleanupObjDirs(), cleanupLogs() + cleanupJunk()) - if (dirCount + fileCount > 0) - normal("Cleaned up %d directories and %d files.\n".format(dirCount, fileCount)) - } - - def cleanupObjDirs() = countTrue(allObjDirs collect { case x if x.exists => x.deleteRecursively() }) - def cleanupJunk() = countTrue(allClassFiles collect { case x if x.exists => x.delete() }) - def cleanupLogs() = countTrue(allLogFiles collect { case x if x.exists => x.delete() }) - - /** Look through every file in the partest directory and ask around - * to make sure someone knows him. Complain about strangers. - */ - def validateAll() { - def denotesTest(p: Path) = allCategories exists (_ denotesTest p) - def isMSILcheck(p: Path) = p.name endsWith "-msil.check" - - def analyzeCategory(cat: DirBasedCategory) = { - val allTests = cat.enumerate - val otherPaths = cat.root walkFilter (x => !ignorePath(x)) filterNot (cat denotesTest _) filterNot isMSILcheck toList - val count = otherPaths.size - - println("Validating %d non-test paths in %s.".format(count, cat.kind)) - - for (path <- otherPaths) { - (allTests find (_ acknowledges path)) match { - case Some(test) => if (isVerbose) println(" OK: '%s' is claimed by '%s'".format(path, test.label)) - case _ => println(">> Unknown path '%s'" format path) - } - } - } - - allCategories collect { case x: DirBasedCategory => analyzeCategory(x) } - } - - trait TestHousekeeping { - self: TestEntity => - - /** Calculating derived files. Given a test like - * files/run/foo.scala or files/run/foo/ - * This creates paths like foo.check, foo.flags, etc. - */ - def withExtension(extension: String) = categoryDir / "%s.%s".format(label, extension) - - /** True for a path if this test acknowledges it belongs to this test. - * Overridden by some categories. - */ - def acknowledges(path: Path): Boolean = { - val loc = location.normalize - val knownPaths = List(scalaOptsFile, javaOptsFile, commandFile, logFile, checkFile) ++ jarsInTestDir - def isContainedSource = location.isDirectory && isJavaOrScala(path) && (path.normalize startsWith loc) - - (knownPaths exists (_ isSame path)) || isContainedSource - } - - /** This test "responds to" this String. This could mean anything -- it's a - * way of specifying ad-hoc collections of tests to exercise only a subset of tests. - * At present it looks for the given String in all the test sources. 
- */ - def respondsToString(str: String) = containsString(str) - def containsString(str: String) = { - debug("Checking %s for \"%s\"".format(sourceFiles mkString ", ", str)) - sourceFiles map safeSlurp exists (_ contains str) - } - - def possiblyTimed[T](body: => T): T = { - if (isStats) timed(recordTestTiming(label, _))(body) - else body - } - - private def prepareForTestRun() = { - // make sure we have a clean slate - deleteLog(force = true) - if (outDir.exists) - outDir.deleteRecursively() - - // recreate object dir - outDir createDirectory true - } - def deleteOutDir() = outDir.deleteRecursively() - def deleteShutdownHook() = { debug("Shutdown hook deleting " + outDir) ; deleteOutDir() } - - protected def runWrappers[T](body: => T): Option[T] = { - prepareForTestRun() - - withShutdownHook(deleteShutdownHook()) { - loggingOutAndErr { - val result = possiblyTimed { body } - if (!isNoCleanup) - deleteOutDir() - - result - } - } - } - - override def toString = location.path - override def equals(other: Any) = other match { - case x: TestEntity => location.normalize == x.location.normalize - case _ => false - } - override def hashCode = location.normalize.hashCode - } - - private def countTrue(f: => Iterator[Boolean]) = f filter (_ == true) length -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/Partest.scala b/src/partest-alternative/scala/tools/partest/Partest.scala deleted file mode 100644 index 74a3a6a19b..0000000000 --- a/src/partest-alternative/scala/tools/partest/Partest.scala +++ /dev/null @@ -1,81 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import nsc.io._ -import nsc.util._ -import category.AllCategories - -/** Global object for a Partest run. It is completely configured by the list - * of arguments passed to the constructor (although there are a few properties - * and environment variables which can influence matters.) See PartestSpec.scala - * for the complete list. - */ -class Partest(args: List[String]) extends { - val parsed = PartestSpec(args: _*) -} with Universe with PartestSpec with cmd.Instance with AllCategories { - - if (parsed.propertyArgs.nonEmpty) - debug("Partest property args: " + fromArgs(parsed.propertyArgs)) - - debug("Partest created with args: " + fromArgs(args)) - - def helpMsg = PartestSpec.helpMsg - - // The abstract values from Universe. 
- lazy val testBuildDir = searchForDir(buildDir) - lazy val partestDir = searchForDir(rootDir) - lazy val allCategories = List(Pos, Neg, Run, Jvm, Res, Shootout, Scalap, Scalacheck, BuildManager, Script) - lazy val selectedCategories = if (isAllImplied) allCategories else specifiedCats - - def specifiedTests = parsed.residualArgs map (x => Path(x).normalize) - def specifiedKinds = testKinds filter (x => isSet(x) || (runSets contains x)) - def specifiedCats = specifiedKinds flatMap (x => allCategories find (_.kind == x)) - def isAllImplied = isAll || (specifiedTests.isEmpty && specifiedKinds.isEmpty) - - /** Assembles a filter based on command line options which restrict the test set - * --grep limits to only matching tests - * --failed limits to only recently failed tests (log file is present) - * -- limits to only the given tests and categories (but --all overrides) - * path/to/Test limits to only the given tests and categories - */ - lazy val filter = { - def indivFilter(test: TestEntity) = specifiedTests contains test.location.normalize - def categoryFilter(test: TestEntity) = specifiedCats contains test.category - def indivOrCat(test: TestEntity) = isAllImplied || indivFilter(test) || categoryFilter(test) // combines previous two - - def failedFilter(test: TestEntity) = !isFailed || (test.logFile exists) - def grepFilter(test: TestEntity) = grepExpr.isEmpty || (test containsString grepExpr.get) - def combinedFilter(x: TestEntity) = indivOrCat(x) && failedFilter(x) && grepFilter(x) // combines previous three - - combinedFilter _ - } - - def launchTestSuite() = { - def onTimeout() = { - warning("Partest test run timed out after " + timeout + " seconds.\n") - System.exit(-1) - } - val alarm = new Alarmer(AlarmerAction(timeout, () => onTimeout())) - - try runSelection(selectedCategories, filter) - finally alarm.cancelAll() - } -} - -object Partest { - def fromBuild(dir: String, args: String*): Partest = apply("--builddir" +: dir +: args: _*) - def apply(args: String*): Partest = new Partest(args.toList) - - // builds without partest jars won't actually work - def starr() = fromBuild("") - def locker() = fromBuild("build/locker") - def quick() = fromBuild("build/quick") - def pack() = fromBuild("build/pack") - def strap() = fromBuild("build/strap") - def dist() = fromBuild("dists/latest") -} - diff --git a/src/partest-alternative/scala/tools/partest/PartestSpec.scala b/src/partest-alternative/scala/tools/partest/PartestSpec.scala deleted file mode 100644 index 75d94bdb72..0000000000 --- a/src/partest-alternative/scala/tools/partest/PartestSpec.scala +++ /dev/null @@ -1,104 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Paul Phillips - */ - -package scala.tools -package partest - -import nsc.io._ -import cmd._ - -/** This takes advantage of bits of scala goodness to fully define a command - * line program with a minimum of duplicated code. When the specification object - * is created, the vals are evaluated in order and each of them side effects - * a private accumulator. What emerges is a full list of the valid unary - * and binary arguments, as well as autogenerated help. 
- */ -trait PartestSpec extends Spec with Meta.StdOpts with Interpolation { - def referenceSpec = PartestSpec - def programInfo = Spec.Info("partest", "", "scala.tools.partest.Runner") - private val kind = new Spec.Accumulator[String]() - protected def testKinds = kind.get - - private implicit val tokenizeString = FromString.ArgumentsFromString // String => List[String] - - help(""" - |# Pro Tip! Instant bash completion: `partest --bash` (note backticks) - |Usage: partest [] [ ...] - | : a path to a test designator, typically a .scala file or a directory. - | Examples: files/pos/test1.scala, files/res/bug785 - | - | Test categories:""".stripMargin) - - val isAll = ("all" / "run all tests (default, unless no options given)" --?) - (kind("pos") / "Compile files that are expected to build" --?) - (kind("neg") / "Compile files that are expected to fail" --?) - (kind("run") / "Test JVM backend" --?) - (kind("jvm") / "Test JVM backend" --?) - (kind("res") / "Run resident compiler scenarii" --?) - (kind("buildmanager") / "Run Build Manager scenarii" --?) - (kind("scalacheck") / "Run Scalacheck tests" --?) - (kind("script") / "Run script files" --?) - (kind("shootout") / "Run shootout tests" --?) - (kind("scalap") / "Run scalap tests" --?) - - heading ("""Test "smart" categories:""") - val grepExpr = "grep" / "run all tests with a source file containing " --| - val isFailed = "failed" / "run all tests which failed on the last run" --? - - heading ("Specifying paths and additional flags, ~ means repository root:") - - val rootDir = "rootdir" / "path from ~ to partest" defaultTo "test" - val buildDir = "builddir" / "path from ~ to test build" defaultTo "build/pack" - val srcDir = "srcdir" / "path from --rootdir to sources" defaultTo "files" - val javaOpts = "javaopts" / "flags to java on all runs" defaultToEnv "JAVA_OPTS" - val javacOpts = "javacopts" / "flags to javac on all runs" defaultToEnv "JAVAC_OPTS" - val scalacOpts = "scalacopts" / "flags to scalac on all tests" defaultToEnv "SCALAC_OPTS" - - "pack" / "" expandTo ("--builddir", "build/pack") - "quick" / "" expandTo ("--builddir", "build/quick") - - heading ("Options influencing output:") - val isTrace = "trace" / "show the individual steps taken by each test" --? - val isShowDiff = "show-diff" / "show diff between log and check file" --? - val isShowLog = "show-log" / "show log on failures" --? - val isDryRun = "dry-run" / "do not run tests, only show their traces." --? - val isTerse = "terse" / "be less verbose (almost silent except for failures)" --? - val isVerbose = "verbose" / "be more verbose (additive with --trace)" --? - val isDebug = "debug" / "maximum debugging output" --? - val isAnsi = "ansi" / "print output in color" --? - - heading ("Other options:") - val timeout = "timeout" / "Overall timeout in seconds" defaultTo 7200 - val testWarning = "test-warning" / "Test warning in seconds" defaultTo 90 - val testTimeout = "test-timeout" / "Test timeout in seconds" defaultTo 900 - val isCleanup = "cleanup" / "delete all stale files and dirs before run" --? - val isNoCleanup = "nocleanup" / "do not delete any logfiles or object dirs" --? - val isStats = "stats" / "collect and print statistics about the tests" --? - val isValidate = "validate" / "examine test filesystem for inconsistencies" --? - val isUpdateCheck = "update-check" / "overwrite checkFile if diff fails" --? 
- - "version" / "print version" --> runAndExit(println(Properties.versionMsg)) - - // no help for anything below this line - secret options - // mostly intended for property configuration. - val runSets = ("runsets" --^) getOrElse Nil - val isNoAlarms = "noalarms" --? - val isInsideAnt = "is-in-ant" --? -} - -object PartestSpec extends PartestSpec with Property { - lazy val propMapper = new PropertyMapper(PartestSpec) { - override def isPassThrough(key: String) = key == "partest.options" - } - - type ThisCommandLine = PartestCommandLine - class PartestCommandLine(args: List[String]) extends SpecCommandLine(args) { - override def errorFn(msg: String) = printAndExit("Error: " + msg) - - def propertyArgs = PartestSpec.propertyArgs - } - - override def creator(args: List[String]): PartestCommandLine = new PartestCommandLine(args) -} diff --git a/src/partest-alternative/scala/tools/partest/Properties.scala b/src/partest-alternative/scala/tools/partest/Properties.scala deleted file mode 100644 index 2d36f163c8..0000000000 --- a/src/partest-alternative/scala/tools/partest/Properties.scala +++ /dev/null @@ -1,17 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.tools -package partest - -/** Loads partest.properties from the jar. */ -object Properties extends scala.util.PropertiesTrait { - protected def propCategory = "partest" - protected def pickJarBasedOn = classOf[Application] -} diff --git a/src/partest-alternative/scala/tools/partest/Results.scala b/src/partest-alternative/scala/tools/partest/Results.scala deleted file mode 100644 index e0fceed17a..0000000000 --- a/src/partest-alternative/scala/tools/partest/Results.scala +++ /dev/null @@ -1,121 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import scala.collection.immutable - -trait Results { - self: Universe => - - /** A collection of tests for a Worker. - */ - case class TestsToRun(entities: List[TestEntity]) - - /** The response from a Worker who has been given TestsToRun. - */ - case class ResultsOfRun(results: immutable.Map[TestEntity, TestResult]) - - /** The result of a single test. (0: OK, 1: FAILED, 2: TIMEOUT) - */ - sealed abstract class TestResult(val state: Int, val description: String) { - def entity: TestEntity - - def passed = state == 0 - def colorize(s: String): String - def show(msg: String) = - if (!isShuttingDown) - showResult(colorize(description), msg) - - private def outputPrefix = if (isInsideAnt) "" else markNormal("partest: ") - private def name = src relativize entity.location // e.g. 
"neg/test.scala" - private def showResult(status: String, extraMsg: String) = - normal(outputPrefix + "[...]/%-40s [%s] %s\n".format(name, status, extraMsg)) - - override def equals(other: Any) = other match { - case x: TestResult => entity == x.entity - case _ => false - } - override def hashCode = entity.hashCode - override def toString = "%s [%s]".format(entity, description) - } - - class Success(val entity: TestEntity) extends TestResult(0, " OK ") { - def colorize(s: String) = markSuccess(s) - override def show(msg: String) = if (!isTerse) super.show(msg) - } - class Failure(val entity: TestEntity) extends TestResult(1, " FAILED ") { - def colorize(s: String) = markFailure(s) - - override def show(msg: String) = { - super.show(msg) - - if (isShowDiff || isTrace) - normal(entity.diffOutput) - - if (isShowLog || isTrace) - normal(toStringTrunc(entity.failureMessage(), 1600)) - } - override def toString = List(super.toString, toStringTrunc(entity.failureMessage(), 400)) mkString "\n" - } - class Timeout(val entity: TestEntity) extends TestResult(2, "TIME OUT") { - def colorize(s: String) = markFailure(s) - } - - object TestResult { - def apply(entity: TestEntity, success: Boolean) = - if (success) new Success(entity) - else new Failure(entity) - - def apply(entity: TestEntity, state: Int) = state match { - case 0 => new Success(entity) - case 1 => new Failure(entity) - case 2 => new Timeout(entity) - } - def unapply(x: Any) = x match { - case x: TestResult => Some((x.entity, x.state)) - case _ => None - } - } - - /** The combined results of any number of tests. - */ - case class CombinedTestResults( - passed: Int, - failed: Int, - elapsedMilliseconds: Long, - failures: List[TestResult] - ) { - // housekeeping - val elapsedSecs = elapsedMilliseconds / 1000 - val elapsedMins = elapsedSecs / 60 - val elapsedHrs = elapsedMins / 60 - val dispMins = elapsedMins - elapsedHrs * 60 - val dispSecs = elapsedSecs - elapsedMins * 60 - - def total = passed + failed - def hasFailures = failed > 0 - def exitCode = if (expectedErrors == failed) 0 else 1 - - def ++(x: CombinedTestResults) = CombinedTestResults( - passed + x.passed, - failed + x.failed, - elapsedMilliseconds + x.elapsedMilliseconds, - failures ::: x.failures - ) - - def elapsedString = "%02d:%02d:%02d".format(elapsedHrs, dispMins, dispSecs) - def failuresString = { - if (failures.isEmpty) "" - else "Summary of failures:" :: failures mkString ("\n", "\n", "") - } - - override def toString = - if (total == 0) "There were no tests to run." - else if (isDryRun) "%d tests would be run." 
format total - else if (hasFailures) "%d of %d tests failed (elapsed time: %s)".format(failed, total, elapsedString) + failuresString - else "All %d tests were successful (elapsed time: %s)".format(total, elapsedString) - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/Runner.scala b/src/partest-alternative/scala/tools/partest/Runner.scala deleted file mode 100644 index 7fe2c98d43..0000000000 --- a/src/partest-alternative/scala/tools/partest/Runner.scala +++ /dev/null @@ -1,36 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Philipp Haller - */ - -package scala.tools -package partest - -import nsc.io._ - -object Runner { - def main(args: Array[String]) { - val runner = Partest(args: _*) - import runner._ - - if (args.isEmpty) return println(helpMsg) - if (isValidate) return validateAll() - - printConfigBanner() - - if (isCleanup) - cleanupAll() - - val result = launchTestSuite() - val exitCode = result.exitCode - val message = "\n" + result + "\n" - - if (exitCode == 0) success(message) - else failure(message) - - if (isStats) - showTestStatistics() - - System exit exitCode - } -} diff --git a/src/partest-alternative/scala/tools/partest/Statistics.scala b/src/partest-alternative/scala/tools/partest/Statistics.scala deleted file mode 100644 index e90377cfa7..0000000000 --- a/src/partest-alternative/scala/tools/partest/Statistics.scala +++ /dev/null @@ -1,46 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Philipp Haller - */ - -package scala.tools -package partest - -import scala.collection.mutable - -trait Statistics { - /** Only collected when --stats is given. */ - lazy val testStatistics = new mutable.HashMap[String, Long] - - /** Given function and block of code, evaluates code block, - * calls function with milliseconds elapsed, and returns block result. - */ - def timed[T](f: Long => Unit)(body: => T): T = { - val start = System.currentTimeMillis - val result = body - val end = System.currentTimeMillis - - f(end - start) - result - } - /** Times body and returns both values. - */ - def timed2[T](body: => T): (Long, T) = { - var milliSeconds = 0L - val result = timed(x => milliSeconds = x)(body) - - (milliSeconds, result) - } - - def resultsToStatistics(results: Iterable[(_, Int)]): (Int, Int) = - (results partition (_._2 == 0)) match { - case (winners, losers) => (winners.size, losers.size) - } - - def recordTestTiming(name: String, milliseconds: Long) = - synchronized { testStatistics(name) = milliseconds } - - def showTestStatistics() { - testStatistics.toList sortBy (-_._2) foreach { case (k, v) => println("%s: %.2f seconds".format(k, (v.toDouble / 1000))) } - } -} diff --git a/src/partest-alternative/scala/tools/partest/Universe.scala b/src/partest-alternative/scala/tools/partest/Universe.scala deleted file mode 100644 index 3dd79e4791..0000000000 --- a/src/partest-alternative/scala/tools/partest/Universe.scala +++ /dev/null @@ -1,96 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest - -import nsc.io._ -import category.AllCategories -import io.Logging - -/** The high level view of the partest infrastructure. 
- */ -abstract class Universe - extends Entities - with BuildContributors - with Logging - with Dispatcher - with Statistics - with Housekeeping - with Results - with PartestCompilation - with PartestSpec - with Config - with Alarms - with Actions - with Categories { - - /** The abstract values from which all else is derived. */ - def partestDir: Directory - def testBuildDir: Directory - def allCategories: List[TestCategory] - def selectedCategories: List[TestCategory] - - /** Some plausibly abstract types. */ - type TestBuild <: BuildContributor // e.g. quick, pack - type TestCategory <: AbsTestCategory // e.g. pos, neg, run - type TestEntity <: AbsTestEntity // e.g. files/pos/test25.scala - type TestSequence <: AbsTestSequence // e.g. compile, run, diff - - /** Although TestStep isn't much more than Function1 right now, - * it exists this way so it can become more capable. - */ - implicit def f1ToTestStep(f: TestEntity => Boolean): TestStep = - new TestStep { def apply(test: TestEntity) = f(test) } - - abstract class TestStep extends (TestEntity => Boolean) { - def apply(test: TestEntity): Boolean - } - - /** An umbrella category of tests, such as "pos" or "run". - */ - trait AbsTestCategory extends BuildContributor { - type TestSettings - - def kind: String - def testSequence: TestSequence - def denotesTest(location: Path): Boolean - - def createTest(location: Path): TestEntity - def createSettings(entity: TestEntity): TestSettings - def enumerate: List[TestEntity] - } - - /** A single test. It may involve multiple files, but only a - * single path is used to designate it. - */ - trait AbsTestEntity extends BuildContributor { - def category: TestCategory - def location: Path - def onException(x: Throwable): Unit - def testClasspath: String - - /** Most tests will use the sequence defined by the category, - * but the test can override and define a custom sequence. - */ - def testSequence: TestSequence - - /** True if this test recognizes the given path as a piece of it. - * For validation purposes. - */ - def acknowledges(path: Path): Boolean - } - - /** Every TestEntity is partly characterized by a series of actions - * which are applied to the TestEntity in the given order. The test - * passes if all those actions return true, fails otherwise. 
- */ - trait AbsTestSequence { - def actions: List[TestStep] - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala b/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala deleted file mode 100644 index f8c0133dc1..0000000000 --- a/src/partest-alternative/scala/tools/partest/ant/JavaTask.scala +++ /dev/null @@ -1,57 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest -package ant - -import org.apache.tools.ant.Task -import org.apache.tools.ant.taskdefs.Java -import org.apache.tools.ant.types.Environment - -import scala.tools.nsc.io._ -import scala.tools.nsc.util.ClassPath -import cmd.Spec._ - -class JavaTask extends Java { - override def getTaskName() = "partest" - private val scalaRunnerClass = "scala.tools.nsc.MainGenericRunner" - private val partestRunnerClass = "scala.tools.partest.Runner" - def defaultJvmArgs = "-Xms64M -Xmx768M -Xss768K -XX:MaxPermSize=96M" - - protected def rootDir = prop("partest.rootdir") getOrElse (baseDir / "test").path - protected def partestJVMArgs = prop("partest.jvm.args") getOrElse defaultJvmArgs - protected def runnerArgs = List("-usejavacp", partestRunnerClass, "--javaopts", partestJVMArgs) - - private def baseDir = Directory(getProject.getBaseDir) - private def prop(s: String) = Option(getProject getProperty s) - private def jvmline(s: String) = returning(createJvmarg())(_ setLine s) - private def addArg(s: String) = returning(createArg())(_ setValue s) - - private def newKeyValue(key: String, value: String) = - returning(new Environment.Variable)(x => { x setKey key ; x setValue value }) - - def setDefaults() { - setFork(true) - setFailonerror(true) - getProject.setSystemProperties() - setClassname(scalaRunnerClass) - addSysproperty(newKeyValue("partest.is-in-ant", "true")) - jvmline(partestJVMArgs) - runnerArgs foreach addArg - - // do we want basedir or rootDir to be the cwd? 
- // setDir(Path(rootDir).jfile) - } - - override def init() = { - super.init() - setDefaults() - } -} - diff --git a/src/partest-alternative/scala/tools/partest/antlib.xml b/src/partest-alternative/scala/tools/partest/antlib.xml deleted file mode 100644 index af36f11368..0000000000 --- a/src/partest-alternative/scala/tools/partest/antlib.xml +++ /dev/null @@ -1,3 +0,0 @@ - - - diff --git a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala b/src/partest-alternative/scala/tools/partest/category/AllCategories.scala deleted file mode 100644 index 1c3f4c9899..0000000000 --- a/src/partest-alternative/scala/tools/partest/category/AllCategories.scala +++ /dev/null @@ -1,20 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest -package category - -trait AllCategories extends Compiler with Analysis with Runner { - self: Universe => - - object Pos extends DirBasedCategory("pos") { lazy val testSequence: TestSequence = List(compile) } - object Neg extends DirBasedCategory("neg") { lazy val testSequence: TestSequence = List(checkFileRequired, not(compile), diff) } - object Run extends DirBasedCategory("run") { lazy val testSequence: TestSequence = List(compile, run, diff) } - object Jvm extends DirBasedCategory("jvm") { lazy val testSequence: TestSequence = List(compile, run, diff) } -} diff --git a/src/partest-alternative/scala/tools/partest/category/Analysis.scala b/src/partest-alternative/scala/tools/partest/category/Analysis.scala deleted file mode 100644 index 944f8c691f..0000000000 --- a/src/partest-alternative/scala/tools/partest/category/Analysis.scala +++ /dev/null @@ -1,64 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest -package category - -import java.lang.{ ClassLoader => JavaClassLoader } -import java.net.URL -import nsc.util.ScalaClassLoader -import nsc.io._ - -class PartestClassLoader(urls: Array[URL], parent: JavaClassLoader) extends ScalaClassLoader.URLClassLoader(urls, parent) { - def this(urls: Array[URL]) = this(urls, null) - def bytes(path: String) = findBytesForClassName(path) - def singleton(path: String) = tryToInitializeClass(path).get getField "MODULE$" get null - - /** Calls a method in an object via reflection. 
- */ - def apply[T](className: String, methodName: String)(args: Any*): T = { - def fail = error("Reflection failed on %s.%s".format(className, methodName)) - val clazz = tryToLoadClass(className) getOrElse fail - val obj = singleton(className) - val m = clazz.getMethods find (x => x.getName == methodName && x.getParameterTypes.size == args.size) getOrElse fail - - m.invoke(obj, args map (_.asInstanceOf[AnyRef]): _*).asInstanceOf[T] - } -} - -trait Analysis { - self: Universe => - - object Scalap extends DirBasedCategory("scalap") { - val testSequence: TestSequence = List(checkFileRequired, compile, run, diff) - override def denotesTest(p: Path) = p.isDirectory && (p.toDirectory.files exists (_.name == "result.test")) - override def createTest(location: Path) = new ScalapTest(location) - - class ScalapTest(val location: Path) extends TestEntity { - val category = Scalap - val scalapMain = "scala.tools.scalap.Main$" - val scalapMethod = "decompileScala" - - override def classpathPaths = super.classpathPaths :+ build.scalap - override def checkFile = File(location / "result.test") - - private def runnerURLs = build.classpathPaths ::: classpathPaths map (_.toURL) - private def createClassLoader = new PartestClassLoader(runnerURLs.toArray, this.getClass.getClassLoader) - - val isPackageObject = containsString("package object") - val suffix = if (isPackageObject) ".package" else "" - val className = location.name.capitalize + suffix - - override def run() = loggingResult { - def loader = createClassLoader - def bytes = loader.bytes(className) - - trace("scalap %s".format(className)) - if (isDryRun) "" - else loader[String](scalapMain, scalapMethod)(bytes, isPackageObject) - } - } - } -} diff --git a/src/partest-alternative/scala/tools/partest/category/Compiler.scala b/src/partest-alternative/scala/tools/partest/category/Compiler.scala deleted file mode 100644 index 6b65072856..0000000000 --- a/src/partest-alternative/scala/tools/partest/category/Compiler.scala +++ /dev/null @@ -1,140 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest -package category - -import nsc.io._ -import nsc.reporters._ -import nsc.{ Settings, CompilerCommand } -import scala.tools.nsc.interactive.RefinedBuildManager -import util.copyPath - -trait Compiler { - self: Universe => - - /** Resident Compiler. - * $SCALAC -d dir.obj -Xresident -sourcepath . 
"$@" - */ - object Res extends DirBasedCategory("res") { - lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff) - - override def denotesTest(p: Path) = p.isDirectory && resFile(p).isFile - override def createTest(location: Path) = new ResidentTest(location.toDirectory) - - override def createSettings(entity: TestEntity): TestSettings = - returning(super.createSettings(entity)) { settings => - settings.resident.value = true - settings.sourcepath.value = entity.sourcesDir.path - } - - class ResidentTest(val location: Directory) extends TestEntity { - val category = Res - override def sourcesDir = categoryDir - - override def acknowledges(p: Path) = - super.acknowledges(p) || (resFile(location) isSame p) - - private def residentCompilerCommands = safeLines(resFile(location)) - private def compileResident(global: PartestGlobal, lines: List[String]) = { - def printPrompt = global inform "nsc> " - val results = - lines map { line => - printPrompt - trace("compile " + line) - isDryRun || global.partestCompile(toArgs(line) map (categoryDir / _ path), false) - } - - printPrompt - - /** Note - some res tests are really "neg" style tests, so we can't - * use the return value of the compile. The diff catches failures. - */ - true // results forall (_ == true) - } - - override def compile() = compileResident(newGlobal(Nil)._1, residentCompilerCommands) - } - private[Res] def resFile(p: Path) = p.toFile addExtension "res" - } - - object BuildManager extends DirBasedCategory("buildmanager") { - lazy val testSequence: TestSequence = List(checkFileRequired, compile, diff) - override def denotesTest(p: Path) = p.isDirectory && testFile(p).isFile - override def createTest(location: Path) = new BuildManagerTest(location.toDirectory) - - override def createSettings(entity: TestEntity): TestSettings = - returning[TestSettings](super.createSettings(entity)) { settings => - settings.Ybuildmanagerdebug.value = true - settings.sourcepath.value = entity.sourcesDir.path - } - - class PartestBuildManager(settings: Settings, val reporter: ConsoleReporter) extends RefinedBuildManager(settings) { - def errorFn(msg: String) = Console println msg - - override protected def newCompiler(newSettings: Settings) = - new BuilderGlobal(newSettings, reporter) - - private def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] = - fs flatMap (s => Option(AbstractFile getFile (Path(settings.sourcepath.value) / s path))) toSet - - def buildManagerCompile(line: String): Boolean = { - val prompt = "builder > " - reporter printMessage (prompt + line) - val command = new CompilerCommand(toArgs(line), settings) - val files = filesToSet(settings.sourcepath.value, command.files) - - update(files, Set.empty) - true - } - } - - private[BuildManager] def testFile(p: Path) = (p / p.name addExtension "test").toFile - - class BuildManagerTest(val location: Directory) extends TestEntity { - val category = BuildManager - - override def sourcesDir = outDir - override def sourceFiles = Path onlyFiles (location walkFilter (_ != changesDir) filter isJavaOrScala toList) - override def checkFile = File(location / location.name addExtension "check") - - override def acknowledges(p: Path) = super.acknowledges(p) || (p isSame testFile(location)) - - def buildManagerCommands = safeLines(testFile(location)) - def changesDir = Directory(location / (location.name + ".changes")) - - override def compile() = { - val settings = createSettings(this) - val pbm = new PartestBuildManager(settings, newReporter(settings)) - - // copy 
files - for (source <- sourceFiles) { - val target = outDir / (location.normalize relativize source) - copyPath(source, target.toFile) - } - - def runUpdate(line: String) = { - val Array(srcName, replacement) = line split "=>" - copyPath(File(changesDir / replacement), File(outDir / srcName)) - } - - def sendCommand(line: String): Boolean = { - val compileRegex = """^>>compile (.*)$""".r - val updateRegex = """^>>update\s+(.*)""".r - trace("send: " + (line drop 2)) - - isDryRun || (line match { - case compileRegex(xs) => pbm.buildManagerCompile(xs) - case updateRegex(line) => runUpdate(line) - }) - } - - // send each line to the build manager - buildManagerCommands forall sendCommand - } - } - } -} - diff --git a/src/partest-alternative/scala/tools/partest/category/Runner.scala b/src/partest-alternative/scala/tools/partest/category/Runner.scala deleted file mode 100644 index add1c55feb..0000000000 --- a/src/partest-alternative/scala/tools/partest/category/Runner.scala +++ /dev/null @@ -1,108 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - -package scala.tools -package partest -package category - -import nsc.io._ - -trait Runner { - self: Universe => - - /** Shootout. - */ - object Shootout extends DirBasedCategory("shootout") { - lazy val testSequence: TestSequence = List(compile, run, diff) - - override def denotesTest(p: Path) = isScala(p) && runner(p).isFile - override def createTest(location: Path) = new ShootoutTest(location.toFile) - - class ShootoutTest(val location: File) extends TestEntity { - val category = Shootout - // The files in shootout are very free form, so acknowledge anything close. - override def acknowledges(p: Path) = - (p.parent.normalize isSame Shootout.root) && (p.name startsWith label) - - private def generated = File(outDir / "test.scala") - private def runnerFile = runner(location) - override def sourceFiles = List(generated) - - override def compile() = { - trace("generate %s from %s, %s".format(tracePath(generated), tracePath(location), tracePath(runnerFile))) - // generate source file (even on dry run, we need the path) - generated.writeAll(location.slurp(), runnerFile.slurp()) - - // compile generated file - super.compile() - } - } - - private[Shootout] def runner(p: Path) = p addExtension "runner" toFile - } - - object Scalacheck extends DirBasedCategory("scalacheck") { - lazy val testSequence: TestSequence = List(compile, run) - override def createTest(location: Path) = new ScalacheckTest(location) - - class ScalacheckTest(val location: Path) extends TestEntity { - val category = Scalacheck - - import build.{ scalacheck, forkjoin } - import org.scalacheck.Properties - import org.scalacheck.Test.{ checkProperties, defaultParams, Result } - - override def classpathPaths = super.classpathPaths ::: List(scalacheck, forkjoin) - private def arrayURLs = Array(scalacheck, outDir) map (_.toURL) - - /** For reasons I'm not entirely clear on, I've written all this - * to avoid a source dependency on scalacheck. 
- */ - class ScalacheckClassLoader extends PartestClassLoader(arrayURLs, this.getClass.getClassLoader) { - type ScalacheckResult = { def passed: Boolean } - - def propCallback(name: String, passed: Int, discarded: Int): Unit = () - def testCallback(name: String, result: AnyRef): Unit = () - - val test = singleton("Test$") - val params = apply[AnyRef]("org.scalacheck.Test$", "defaultParams")() - val result = apply[Seq[(String, AnyRef)]]("org.scalacheck.Test$", "checkProperties")(test, params, propCallback _, testCallback _) - - def allResults() = - for ((prop, res) <- result) yield { - ScalacheckTest.this.trace("%s: %s".format(prop, res)) - res.asInstanceOf[ScalacheckResult].passed - } - - def check() = allResults forall (_ == true) - } - - override def run() = { - trace("scalacheck runs via classloader with: %s".format(arrayURLs mkString ", ")) - isDryRun || (new ScalacheckClassLoader check) - } - } - } - - object Script extends DirBasedCategory("script") { - val testSequence: TestSequence = List(exec, diff) - override def createTest(location: Path) = new ScriptTest(location) - - class ScriptTest(val location: Path) extends TestEntity { - val category = Script - val scriptFile = if (location.isDirectory) location / (label + ".scala") else location - val argsFile = withExtension("args").toFile - def batFile = scriptFile changeExtension "bat" - def script = if (Properties.isWin) batFile else scriptFile - - override def acknowledges(p: Path) = super.acknowledges(p) || (List(argsFile, batFile) exists (_ isSame p)) - override def execCwd = Some(sourcesDir) - override def argumentsToExec = script.path :: safeArgs(argsFile) - } - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala b/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala deleted file mode 100644 index 59216cf03b..0000000000 --- a/src/partest-alternative/scala/tools/partest/io/ANSIWriter.scala +++ /dev/null @@ -1,58 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - * @author Philipp Haller - */ - -package scala.tools -package partest -package io - -import java.io.{ Writer, PrintWriter, OutputStream, OutputStreamWriter } - -object ANSIWriter { - val NONE = 0 - val SOME = 1 - val MANY = 2 - - def apply(isAnsi: Boolean) = if (isAnsi) MANY else NONE -} -import ANSIWriter._ - -class ANSIWriter(writer: Writer) extends PrintWriter(writer, true) { - def this(out: OutputStream) = this(new OutputStreamWriter(out)) - def colorful: Int = NONE - - protected val manyColors = List( - Console.BOLD + Console.BLACK, - Console.BOLD + Console.GREEN, - Console.BOLD + Console.RED, - Console.BOLD + Console.YELLOW, - Console.RESET - ) - protected val someColors = List( - Console.BOLD + Console.BLACK, - Console.RESET, - Console.BOLD + Console.BLACK, - Console.BOLD + Console.BLACK, - Console.RESET - ) - protected val noColors = List("", "", "", "", "") - - lazy val List(_outline, _success, _failure, _warning, _default) = colorful match { - case NONE => noColors - case SOME => someColors - case MANY => manyColors - case _ => noColors - } - - private def wrprint(msg: String): Unit = synchronized { - print(msg) - flush() - } - - def outline(msg: String) = wrprint(_outline + msg + _default) - def success(msg: String) = wrprint(_success + msg + _default) - def failure(msg: String) = wrprint(_failure + msg + _default) - def warning(msg: String) = wrprint(_warning + msg + _default) - def normal(msg: String) = wrprint(_default + msg) -} diff --git 
a/src/partest-alternative/scala/tools/partest/io/Diff.java b/src/partest-alternative/scala/tools/partest/io/Diff.java deleted file mode 100644 index 69428d7e7a..0000000000 --- a/src/partest-alternative/scala/tools/partest/io/Diff.java +++ /dev/null @@ -1,873 +0,0 @@ - -package scala.tools.partest.io; - -import java.util.Hashtable; - -/** A class to compare IndexedSeqs of objects. The result of comparison - is a list of change objects which form an - edit script. The objects compared are traditionally lines - of text from two files. Comparison options such as "ignore - whitespace" are implemented by modifying the equals - and hashcode methods for the objects compared. -
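
That last point deserves a concrete illustration. A rough sketch, not code from this patch (the WsInsensitiveLine name is hypothetical), of how an "ignore whitespace" comparison is expressed purely through the wrapper objects handed to Diff:

    // Hypothetical wrapper, relying only on the equals/hashCode contract
    // described above: two lines compare equal once whitespace is collapsed,
    // so Diff itself needs no "ignore whitespace" flag.
    final class WsInsensitiveLine(val raw: String) {
      private val key = raw.trim.replaceAll("\\s+", " ")
      override def equals(other: Any): Boolean = other match {
        case that: WsInsensitiveLine => key == that.key
        case _                       => false
      }
      override def hashCode: Int = key.hashCode
      override def toString: String = raw
    }

Arrays of such wrappers would then be passed to the Diff constructor in place of the raw lines.
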
- The basic algorithm is described in:
- "An O(ND) Difference Algorithm and its Variations", Eugene Myers, - Algorithmica Vol. 1 No. 2, 1986, p 251. -
- This class outputs different results from GNU diff 1.15 on some - inputs. Our results are actually better (smaller change list, smaller - total size of changes), but it would be nice to know why. Perhaps - there is a memory overwrite bug in GNU diff 1.15. - - @author Stuart D. Gathman, translated from GNU diff 1.15 - Copyright (C) 2000 Business Management Systems, Inc. -
- This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 1, or (at your option) - any later version. -
- This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. -
- You should have received a copy of the - GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - - */ - -public class Diff { - - /** Prepare to find differences between two arrays. Each element of - the arrays is translated to an "equivalence number" based on - the result of equals. The original Object arrays - are no longer needed for computing the differences. They will - be needed again later to print the results of the comparison as - an edit script, if desired. - */ - public Diff(Object[] a,Object[] b) { - Hashtable h = new Hashtable(a.length + b.length); - filevec[0] = new file_data(a,h); - filevec[1] = new file_data(b,h); - } - - /** 1 more than the maximum equivalence value used for this or its - sibling file. */ - private int equiv_max = 1; - - /** When set to true, the comparison uses a heuristic to speed it up. - With this heuristic, for files with a constant small density - of changes, the algorithm is linear in the file size. */ - public boolean heuristic = false; - - /** When set to true, the algorithm returns a guarranteed minimal - set of changes. This makes things slower, sometimes much slower. */ - public boolean no_discards = false; - - private int[] xvec, yvec; /* IndexedSeqs being compared. */ - private int[] fdiag; /* IndexedSeq, indexed by diagonal, containing - the X coordinate of the point furthest - along the given diagonal in the forward - search of the edit matrix. */ - private int[] bdiag; /* IndexedSeq, indexed by diagonal, containing - the X coordinate of the point furthest - along the given diagonal in the backward - search of the edit matrix. */ - private int fdiagoff, bdiagoff; - private final file_data[] filevec = new file_data[2]; - private int cost; - - /** Find the midpoint of the shortest edit script for a specified - portion of the two files. - - We scan from the beginnings of the files, and simultaneously from the ends, - doing a breadth-first search through the space of edit-sequence. - When the two searches meet, we have found the midpoint of the shortest - edit sequence. - - The value returned is the number of the diagonal on which the midpoint lies. - The diagonal number equals the number of inserted lines minus the number - of deleted lines (counting only lines before the midpoint). - The edit cost is stored into COST; this is the total number of - lines inserted or deleted (counting only lines before the midpoint). - - This function assumes that the first lines of the specified portions - of the two files do not match, and likewise that the last lines do not - match. The caller must trim matching lines from the beginning and end - of the portions it is going to specify. - - Note that if we return the "wrong" diagonal value, or if - the value of bdiag at that diagonal is "wrong", - the worst this can do is cause suboptimal diff output. - It cannot cause incorrect diff output. */ - - private int diag (int xoff, int xlim, int yoff, int ylim) { - final int[] fd = fdiag; // Give the compiler a chance. - final int[] bd = bdiag; // Additional help for the compiler. - final int[] xv = xvec; // Still more help for the compiler. - final int[] yv = yvec; // And more and more . . . - final int dmin = xoff - ylim; // Minimum valid diagonal. - final int dmax = xlim - yoff; // Maximum valid diagonal. - final int fmid = xoff - yoff; // Center diagonal of top-down search. - final int bmid = xlim - ylim; // Center diagonal of bottom-up search. 
- int fmin = fmid, fmax = fmid; // Limits of top-down search. - int bmin = bmid, bmax = bmid; // Limits of bottom-up search. - /* True if southeast corner is on an odd - diagonal with respect to the northwest. */ - final boolean odd = (fmid - bmid & 1) != 0; - - fd[fdiagoff + fmid] = xoff; - bd[bdiagoff + bmid] = xlim; - - for (int c = 1;; ++c) - { - int d; /* Active diagonal. */ - boolean big_snake = false; - - /* Extend the top-down search by an edit step in each diagonal. */ - if (fmin > dmin) - fd[fdiagoff + --fmin - 1] = -1; - else - ++fmin; - if (fmax < dmax) - fd[fdiagoff + ++fmax + 1] = -1; - else - --fmax; - for (d = fmax; d >= fmin; d -= 2) - { - int x, y, oldx, tlo = fd[fdiagoff + d - 1], thi = fd[fdiagoff + d + 1]; - - if (tlo >= thi) - x = tlo + 1; - else - x = thi; - oldx = x; - y = x - d; - while (x < xlim && y < ylim && xv[x] == yv[y]) { - ++x; ++y; - } - if (x - oldx > 20) - big_snake = true; - fd[fdiagoff + d] = x; - if (odd && bmin <= d && d <= bmax && bd[bdiagoff + d] <= fd[fdiagoff + d]) - { - cost = 2 * c - 1; - return d; - } - } - - /* Similar extend the bottom-up search. */ - if (bmin > dmin) - bd[bdiagoff + --bmin - 1] = Integer.MAX_VALUE; - else - ++bmin; - if (bmax < dmax) - bd[bdiagoff + ++bmax + 1] = Integer.MAX_VALUE; - else - --bmax; - for (d = bmax; d >= bmin; d -= 2) - { - int x, y, oldx, tlo = bd[bdiagoff + d - 1], thi = bd[bdiagoff + d + 1]; - - if (tlo < thi) - x = tlo; - else - x = thi - 1; - oldx = x; - y = x - d; - while (x > xoff && y > yoff && xv[x - 1] == yv[y - 1]) { - --x; --y; - } - if (oldx - x > 20) - big_snake = true; - bd[bdiagoff + d] = x; - if (!odd && fmin <= d && d <= fmax && bd[bdiagoff + d] <= fd[fdiagoff + d]) - { - cost = 2 * c; - return d; - } - } - - /* Heuristic: check occasionally for a diagonal that has made - lots of progress compared with the edit distance. - If we have any such, find the one that has made the most - progress and return it as if it had succeeded. - - With this heuristic, for files with a constant small density - of changes, the algorithm is linear in the file size. */ - - if (c > 200 && big_snake && heuristic) - { - int best = 0; - int bestpos = -1; - - for (d = fmax; d >= fmin; d -= 2) - { - int dd = d - fmid; - if ((fd[fdiagoff + d] - xoff)*2 - dd > 12 * (c + (dd > 0 ? dd : -dd))) - { - if (fd[fdiagoff + d] * 2 - dd > best - && fd[fdiagoff + d] - xoff > 20 - && fd[fdiagoff + d] - d - yoff > 20) - { - int k; - int x = fd[fdiagoff + d]; - - /* We have a good enough best diagonal; - now insist that it end with a significant snake. */ - for (k = 1; k <= 20; k++) - if (xvec[x - k] != yvec[x - d - k]) - break; - - if (k == 21) - { - best = fd[fdiagoff + d] * 2 - dd; - bestpos = d; - } - } - } - } - if (best > 0) - { - cost = 2 * c - 1; - return bestpos; - } - - best = 0; - for (d = bmax; d >= bmin; d -= 2) - { - int dd = d - bmid; - if ((xlim - bd[bdiagoff + d])*2 + dd > 12 * (c + (dd > 0 ? dd : -dd))) - { - if ((xlim - bd[bdiagoff + d]) * 2 + dd > best - && xlim - bd[bdiagoff + d] > 20 - && ylim - (bd[bdiagoff + d] - d) > 20) - { - /* We have a good enough best diagonal; - now insist that it end with a significant snake. 
*/ - int k; - int x = bd[bdiagoff + d]; - - for (k = 0; k < 20; k++) - if (xvec[x + k] != yvec[x - d + k]) - break; - if (k == 20) - { - best = (xlim - bd[bdiagoff + d]) * 2 + dd; - bestpos = d; - } - } - } - } - if (best > 0) - { - cost = 2 * c - 1; - return bestpos; - } - } - } - } - - /** Compare in detail contiguous subsequences of the two files - which are known, as a whole, to match each other. - - The results are recorded in the IndexedSeqs filevec[N].changed_flag, by - storing a 1 in the element for each line that is an insertion or deletion. - - The subsequence of file 0 is [XOFF, XLIM) and likewise for file 1. - - Note that XLIM, YLIM are exclusive bounds. - All line numbers are origin-0 and discarded lines are not counted. */ - - private void compareseq (int xoff, int xlim, int yoff, int ylim) { - /* Slide down the bottom initial diagonal. */ - while (xoff < xlim && yoff < ylim && xvec[xoff] == yvec[yoff]) { - ++xoff; ++yoff; - } - /* Slide up the top initial diagonal. */ - while (xlim > xoff && ylim > yoff && xvec[xlim - 1] == yvec[ylim - 1]) { - --xlim; --ylim; - } - - /* Handle simple cases. */ - if (xoff == xlim) - while (yoff < ylim) - filevec[1].changed_flag[1+filevec[1].realindexes[yoff++]] = true; - else if (yoff == ylim) - while (xoff < xlim) - filevec[0].changed_flag[1+filevec[0].realindexes[xoff++]] = true; - else - { - /* Find a point of correspondence in the middle of the files. */ - - int d = diag (xoff, xlim, yoff, ylim); - int c = cost; - int f = fdiag[fdiagoff + d]; - int b = bdiag[bdiagoff + d]; - - if (c == 1) - { - /* This should be impossible, because it implies that - one of the two subsequences is empty, - and that case was handled above without calling `diag'. - Let's verify that this is true. */ - throw new IllegalArgumentException("Empty subsequence"); - } - else - { - /* Use that point to split this problem into two subproblems. */ - compareseq (xoff, b, yoff, b - d); - /* This used to use f instead of b, - but that is incorrect! - It is not necessarily the case that diagonal d - has a snake from b to f. */ - compareseq (b, xlim, b - d, ylim); - } - } - } - - /** Discard lines from one file that have no matches in the other file. - */ - - private void discard_confusing_lines() { - filevec[0].discard_confusing_lines(filevec[1]); - filevec[1].discard_confusing_lines(filevec[0]); - } - - private boolean inhibit = false; - - /** Adjust inserts/deletes of blank lines to join changes - as much as possible. - */ - - private void shift_boundaries() { - if (inhibit) - return; - filevec[0].shift_boundaries(filevec[1]); - filevec[1].shift_boundaries(filevec[0]); - } - - public interface ScriptBuilder { - /** Scan the tables of which lines are inserted and deleted, - producing an edit script. - @param changed0 true for lines in first file which do not match 2nd - @param len0 number of lines in first file - @param changed1 true for lines in 2nd file which do not match 1st - @param len1 number of lines in 2nd file - @return a linked list of changes - or null - */ - public change build_script( - boolean[] changed0,int len0, - boolean[] changed1,int len1 - ); - } - - /** Scan the tables of which lines are inserted and deleted, - producing an edit script in reverse order. 
*/ - - static class ReverseScript implements ScriptBuilder { - public change build_script( - final boolean[] changed0,int len0, - final boolean[] changed1,int len1) - { - change script = null; - int i0 = 0, i1 = 0; - while (i0 < len0 || i1 < len1) { - if (changed0[1+i0] || changed1[1+i1]) { - int line0 = i0, line1 = i1; - - /* Find # lines changed here in each file. */ - while (changed0[1+i0]) ++i0; - while (changed1[1+i1]) ++i1; - - /* Record this change. */ - script = new change(line0, line1, i0 - line0, i1 - line1, script); - } - - /* We have reached lines in the two files that match each other. */ - i0++; i1++; - } - - return script; - } - } - - static class ForwardScript implements ScriptBuilder { - /** Scan the tables of which lines are inserted and deleted, - producing an edit script in forward order. */ - public change build_script( - final boolean[] changed0,int len0, - final boolean[] changed1,int len1) - { - change script = null; - int i0 = len0, i1 = len1; - - while (i0 >= 0 || i1 >= 0) - { - if (changed0[i0] || changed1[i1]) - { - int line0 = i0, line1 = i1; - - /* Find # lines changed here in each file. */ - while (changed0[i0]) --i0; - while (changed1[i1]) --i1; - - /* Record this change. */ - script = new change(i0, i1, line0 - i0, line1 - i1, script); - } - - /* We have reached lines in the two files that match each other. */ - i0--; i1--; - } - - return script; - } - } - - /** Standard ScriptBuilders. */ - public final static ScriptBuilder - forwardScript = new ForwardScript(), - reverseScript = new ReverseScript(); - - /* Report the differences of two files. DEPTH is the current directory - depth. */ - public final change diff_2(final boolean reverse) { - return diff(reverse ? reverseScript : forwardScript); - } - - /** Get the results of comparison as an edit script. The script - is described by a list of changes. The standard ScriptBuilder - implementations provide for forward and reverse edit scripts. - Alternate implementations could, for instance, list common elements - instead of differences. - @param bld an object to build the script from change flags - @return the head of a list of changes - */ - public change diff(final ScriptBuilder bld) { - - /* Some lines are obviously insertions or deletions - because they don't match anything. Detect them now, - and avoid even thinking about them in the main comparison algorithm. */ - - discard_confusing_lines (); - - /* Now do the main comparison algorithm, considering just the - undiscarded lines. */ - - xvec = filevec[0].undiscarded; - yvec = filevec[1].undiscarded; - - int diags = - filevec[0].nondiscarded_lines + filevec[1].nondiscarded_lines + 3; - fdiag = new int[diags]; - fdiagoff = filevec[1].nondiscarded_lines + 1; - bdiag = new int[diags]; - bdiagoff = filevec[1].nondiscarded_lines + 1; - - compareseq (0, filevec[0].nondiscarded_lines, - 0, filevec[1].nondiscarded_lines); - fdiag = null; - bdiag = null; - - /* Modify the results slightly to make them prettier - in cases where that can validly be done. */ - - shift_boundaries (); - - /* Get the results of comparison in the form of a chain - of `struct change's -- an edit script. */ - return bld.build_script( - filevec[0].changed_flag, - filevec[0].buffered_lines, - filevec[1].changed_flag, - filevec[1].buffered_lines - ); - - } - - /** The result of comparison is an "edit script": a chain of change objects. - Each change represents one place where some lines are deleted - and some are inserted. 
- - LINE0 and LINE1 are the first affected lines in the two files (origin 0). - DELETED is the number of lines deleted here from file 0. - INSERTED is the number of lines inserted here in file 1. - - If DELETED is 0 then LINE0 is the number of the line before - which the insertion was done; vice versa for INSERTED and LINE1. */ - - public static class change { - /** Previous or next edit command. */ - public change link; - /** # lines of file 1 changed here. */ - public final int inserted; - /** # lines of file 0 changed here. */ - public final int deleted; - /** Line number of 1st deleted line. */ - public final int line0; - /** Line number of 1st inserted line. */ - public final int line1; - - /** Cons an additional entry onto the front of an edit script OLD. - LINE0 and LINE1 are the first affected lines in the two files (origin 0). - DELETED is the number of lines deleted here from file 0. - INSERTED is the number of lines inserted here in file 1. - - If DELETED is 0 then LINE0 is the number of the line before - which the insertion was done; vice versa for INSERTED and LINE1. */ - public change(int line0, int line1, int deleted, int inserted, change old) { - this.line0 = line0; - this.line1 = line1; - this.inserted = inserted; - this.deleted = deleted; - this.link = old; - //System.err.println(line0+","+line1+","+inserted+","+deleted); - } - } - - /** Data on one input file being compared. - */ - - class file_data { - - /** Allocate changed array for the results of comparison. */ - void clear() { - /* Allocate a flag for each line of each file, saying whether that line - is an insertion or deletion. - Allocate an extra element, always zero, at each end of each IndexedSeq. - */ - changed_flag = new boolean[buffered_lines + 2]; - } - - /** Return equiv_count[I] as the number of lines in this file - that fall in equivalence class I. - @return the array of equivalence class counts. - */ - int[] equivCount() { - int[] equiv_count = new int[equiv_max]; - for (int i = 0; i < buffered_lines; ++i) - ++equiv_count[equivs[i]]; - return equiv_count; - } - - /** Discard lines that have no matches in another file. - - A line which is discarded will not be considered by the actual - comparison algorithm; it will be as if that line were not in the file. - The file's `realindexes' table maps virtual line numbers - (which don't count the discarded lines) into real line numbers; - this is how the actual comparison algorithm produces results - that are comprehensible when the discarded lines are counted. -
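
A small hedged illustration of that virtual-to-real mapping (standalone Scala with made-up data, not code from this file): lines that match nothing in the other file are dropped before the O(ND) search, and realindexes maps positions in the reduced sequence back to real line numbers:

    val lines     = Vector("a", "zzz", "b", "c")   // "zzz" matches nothing in the other file
    val discarded = Vector(false, true, false, false)
    val (undiscarded, realindexes) =
      lines.zipWithIndex.filterNot { case (_, i) => discarded(i) }.unzip
    // undiscarded == Vector("a", "b", "c")  -- what the search sees (the real code
    //                                          stores equivalence codes, not strings)
    // realindexes == Vector(0, 2, 3)        -- virtual line 1 is really line 2
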
- When we discard a line, we also mark it as a deletion or insertion - so that it will be printed in the output. - @param f the other file - */ - void discard_confusing_lines(file_data f) { - clear(); - /* Set up table of which lines are going to be discarded. */ - final byte[] discarded = discardable(f.equivCount()); - - /* Don't really discard the provisional lines except when they occur - in a run of discardables, with nonprovisionals at the beginning - and end. */ - filterDiscards(discarded); - - /* Actually discard the lines. */ - discard(discarded); - } - - /** Mark to be discarded each line that matches no line of another file. - If a line matches many lines, mark it as provisionally discardable. - @see equivCount() - @param counts The count of each equivalence number for the other file. - @return 0=nondiscardable, 1=discardable or 2=provisionally discardable - for each line - */ - - private byte[] discardable(final int[] counts) { - final int end = buffered_lines; - final byte[] discards = new byte[end]; - final int[] equivs = this.equivs; - int many = 5; - int tem = end / 64; - - /* Multiply MANY by approximate square root of number of lines. - That is the threshold for provisionally discardable lines. */ - while ((tem = tem >> 2) > 0) - many *= 2; - - for (int i = 0; i < end; i++) - { - int nmatch; - if (equivs[i] == 0) - continue; - nmatch = counts[equivs[i]]; - if (nmatch == 0) - discards[i] = 1; - else if (nmatch > many) - discards[i] = 2; - } - return discards; - } - - /** Don't really discard the provisional lines except when they occur - in a run of discardables, with nonprovisionals at the beginning - and end. */ - - private void filterDiscards(final byte[] discards) { - final int end = buffered_lines; - - for (int i = 0; i < end; i++) - { - /* Cancel provisional discards not in middle of run of discards. */ - if (discards[i] == 2) - discards[i] = 0; - else if (discards[i] != 0) - { - /* We have found a nonprovisional discard. */ - int j; - int length; - int provisional = 0; - - /* Find end of this run of discardable lines. - Count how many are provisionally discardable. */ - for (j = i; j < end; j++) - { - if (discards[j] == 0) - break; - if (discards[j] == 2) - ++provisional; - } - - /* Cancel provisional discards at end, and shrink the run. */ - while (j > i && discards[j - 1] == 2) { - discards[--j] = 0; --provisional; - } - - /* Now we have the length of a run of discardable lines - whose first and last are not provisional. */ - length = j - i; - - /* If 1/4 of the lines in the run are provisional, - cancel discarding of all provisional lines in the run. */ - if (provisional * 4 > length) - { - while (j > i) - if (discards[--j] == 2) - discards[j] = 0; - } - else - { - int consec; - int minimum = 1; - int tem = length / 4; - - /* MINIMUM is approximate square root of LENGTH/4. - A subrun of two or more provisionals can stand - when LENGTH is at least 16. - A subrun of 4 or more can stand when LENGTH >= 64. */ - while ((tem = tem >> 2) > 0) - minimum *= 2; - minimum++; - - /* Cancel any subrun of MINIMUM or more provisionals - within the larger run. */ - for (j = 0, consec = 0; j < length; j++) - if (discards[i + j] != 2) - consec = 0; - else if (minimum == ++consec) - /* Back up to start of subrun, to cancel it all. */ - j -= consec; - else if (minimum < consec) - discards[i + j] = 0; - - /* Scan from beginning of run - until we find 3 or more nonprovisionals in a row - or until the first nonprovisional at least 8 lines in. - Until that point, cancel any provisionals. 
*/ - for (j = 0, consec = 0; j < length; j++) - { - if (j >= 8 && discards[i + j] == 1) - break; - if (discards[i + j] == 2) { - consec = 0; discards[i + j] = 0; - } - else if (discards[i + j] == 0) - consec = 0; - else - consec++; - if (consec == 3) - break; - } - - /* I advances to the last line of the run. */ - i += length - 1; - - /* Same thing, from end. */ - for (j = 0, consec = 0; j < length; j++) - { - if (j >= 8 && discards[i - j] == 1) - break; - if (discards[i - j] == 2) { - consec = 0; discards[i - j] = 0; - } - else if (discards[i - j] == 0) - consec = 0; - else - consec++; - if (consec == 3) - break; - } - } - } - } - } - - /** Actually discard the lines. - @param discards flags lines to be discarded - */ - private void discard(final byte[] discards) { - final int end = buffered_lines; - int j = 0; - for (int i = 0; i < end; ++i) - if (no_discards || discards[i] == 0) - { - undiscarded[j] = equivs[i]; - realindexes[j++] = i; - } - else - changed_flag[1+i] = true; - nondiscarded_lines = j; - } - - file_data(Object[] data,Hashtable h) { - buffered_lines = data.length; - - equivs = new int[buffered_lines]; - undiscarded = new int[buffered_lines]; - realindexes = new int[buffered_lines]; - - for (int i = 0; i < data.length; ++i) { - Integer ir = (Integer)h.get(data[i]); - if (ir == null) - h.put(data[i],new Integer(equivs[i] = equiv_max++)); - else - equivs[i] = ir.intValue(); - } - } - - /** Adjust inserts/deletes of blank lines to join changes - as much as possible. - - We do something when a run of changed lines include a blank - line at one end and have an excluded blank line at the other. - We are free to choose which blank line is included. - `compareseq' always chooses the one at the beginning, - but usually it is cleaner to consider the following blank line - to be the "change". The only exception is if the preceding blank line - would join this change to other changes. - @param f the file being compared against - */ - - void shift_boundaries(file_data f) { - final boolean[] changed = changed_flag; - final boolean[] other_changed = f.changed_flag; - int i = 0; - int j = 0; - int i_end = buffered_lines; - int preceding = -1; - int other_preceding = -1; - - for (;;) - { - int start, end, other_start; - - /* Scan forwards to find beginning of another run of changes. - Also keep track of the corresponding point in the other file. */ - - while (i < i_end && !changed[1+i]) - { - while (other_changed[1+j++]) - /* Non-corresponding lines in the other file - will count as the preceding batch of changes. */ - other_preceding = j; - i++; - } - - if (i == i_end) - break; - - start = i; - other_start = j; - - for (;;) - { - /* Now find the end of this run of changes. */ - - while (i < i_end && changed[1+i]) i++; - end = i; - - /* If the first changed line matches the following unchanged one, - and this run does not follow right after a previous run, - and there are no lines deleted from the other file here, - then classify the first changed line as unchanged - and the following line as changed in its place. */ - - /* You might ask, how could this run follow right after another? - Only because the previous run was shifted here. 
*/ - - if (end != i_end - && equivs[start] == equivs[end] - && !other_changed[1+j] - && end != i_end - && !((preceding >= 0 && start == preceding) - || (other_preceding >= 0 - && other_start == other_preceding))) - { - changed[1+end++] = true; - changed[1+start++] = false; - ++i; - /* Since one line-that-matches is now before this run - instead of after, we must advance in the other file - to keep in synch. */ - ++j; - } - else - break; - } - - preceding = i; - other_preceding = j; - } - } - - /** Number of elements (lines) in this file. */ - final int buffered_lines; - - /** IndexedSeq, indexed by line number, containing an equivalence code for - each line. It is this IndexedSeq that is actually compared with that - of another file to generate differences. */ - private final int[] equivs; - - /** IndexedSeq, like the previous one except that - the elements for discarded lines have been squeezed out. */ - final int[] undiscarded; - - /** IndexedSeq mapping virtual line numbers (not counting discarded lines) - to real ones (counting those lines). Both are origin-0. */ - final int[] realindexes; - - /** Total number of nondiscarded lines. */ - int nondiscarded_lines; - - /** Array, indexed by real origin-1 line number, - containing true for a line that is an insertion or a deletion. - The results of comparison are stored here. */ - boolean[] changed_flag; - - } -} diff --git a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java b/src/partest-alternative/scala/tools/partest/io/DiffPrint.java deleted file mode 100644 index 273b6cba52..0000000000 --- a/src/partest-alternative/scala/tools/partest/io/DiffPrint.java +++ /dev/null @@ -1,606 +0,0 @@ - -package scala.tools.partest.io; - -import java.io.*; -import java.util.Vector; -import java.util.Date; -//import com.objectspace.jgl.predicates.UnaryPredicate; - -interface UnaryPredicate { - boolean execute(Object obj); -} - -/** A simple framework for printing change lists produced by Diff. - @see bmsi.util.Diff - @author Stuart D. Gathman - Copyright (C) 2000 Business Management Systems, Inc. -
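
As a hedged end-to-end sketch (the expected output is worked out by hand from the normal diff format, not captured from a run), the two classes removed in this patch combine roughly like this:

    import java.io.StringWriter
    import scala.tools.partest.io.{ Diff, DiffPrint }

    val a: Array[AnyRef] = Array("a", "b", "c")
    val b: Array[AnyRef] = Array("a", "x", "c")
    val out = new StringWriter
    new DiffPrint.NormalPrint(a, b, out).print_script(new Diff(a, b).diff_2(false))
    // out.toString should read:
    //   2c2
    //   < b
    //   ---
    //   > x
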
- This program is free software; you can redistribute it and/or modify - it under the terms of the GNU General Public License as published by - the Free Software Foundation; either version 1, or (at your option) - any later version. -
- This program is distributed in the hope that it will be useful, - but WITHOUT ANY WARRANTY; without even the implied warranty of - MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the - GNU General Public License for more details. -
- You should have received a copy of the GNU General Public License - along with this program; if not, write to the Free Software - Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA. - */ -public class DiffPrint { - /** A Base class for printing edit scripts produced by Diff. - This class divides the change list into "hunks", and calls - print_hunk for each hunk. Various utility methods - are provided as well. - */ - public static abstract class Base { - protected Base(Object[] a,Object[] b, Writer w) { - outfile = new PrintWriter(w); - file0 = a; - file1 = b; - } - /** Set to ignore certain kinds of lines when printing - an edit script. For example, ignoring blank lines or comments. - */ - protected UnaryPredicate ignore = null; - - /** Set to the lines of the files being compared. - */ - protected Object[] file0, file1; - - /** Divide SCRIPT into pieces by calling HUNKFUN and - print each piece with PRINTFUN. - Both functions take one arg, an edit script. - - PRINTFUN takes a subscript which belongs together (with a null - link at the end) and prints it. */ - public void print_script(Diff.change script) { - Diff.change next = script; - - while (next != null) - { - Diff.change t, end; - - /* Find a set of changes that belong together. */ - t = next; - end = hunkfun(next); - - /* Disconnect them from the rest of the changes, - making them a hunk, and remember the rest for next iteration. */ - next = end.link; - end.link = null; - //if (DEBUG) - // debug_script(t); - - /* Print this hunk. */ - print_hunk(t); - - /* Reconnect the script so it will all be freed properly. */ - end.link = next; - } - outfile.flush(); - } - - /** Called with the tail of the script - and returns the last link that belongs together with the start - of the tail. */ - - protected Diff.change hunkfun(Diff.change hunk) { - return hunk; - } - - protected int first0, last0, first1, last1, deletes, inserts; - protected PrintWriter outfile; - - /** Look at a hunk of edit script and report the range of lines in each file - that it applies to. HUNK is the start of the hunk, which is a chain - of `struct change'. The first and last line numbers of file 0 are stored - in *FIRST0 and *LAST0, and likewise for file 1 in *FIRST1 and *LAST1. - Note that these are internal line numbers that count from 0. - - If no lines from file 0 are deleted, then FIRST0 is LAST0+1. - - Also set *DELETES nonzero if any lines of file 0 are deleted - and set *INSERTS nonzero if any lines of file 1 are inserted. - If only ignorable lines are inserted or deleted, both are - set to 0. */ - - protected void analyze_hunk(Diff.change hunk) { - int f0, l0 = 0, f1, l1 = 0, show_from = 0, show_to = 0; - int i; - Diff.change next; - boolean nontrivial = (ignore == null); - - show_from = show_to = 0; - - f0 = hunk.line0; - f1 = hunk.line1; - - for (next = hunk; next != null; next = next.link) - { - l0 = next.line0 + next.deleted - 1; - l1 = next.line1 + next.inserted - 1; - show_from += next.deleted; - show_to += next.inserted; - for (i = next.line0; i <= l0 && ! nontrivial; i++) - if (!ignore.execute(file0[i])) - nontrivial = true; - for (i = next.line1; i <= l1 && ! nontrivial; i++) - if (!ignore.execute(file1[i])) - nontrivial = true; - } - - first0 = f0; - last0 = l0; - first1 = f1; - last1 = l1; - - /* If all inserted or deleted lines are ignorable, - tell the caller to ignore this hunk. 
*/ - - if (!nontrivial) - show_from = show_to = 0; - - deletes = show_from; - inserts = show_to; - } - - /** Print the script header which identifies the files compared. */ - protected void print_header(String filea, String fileb) { } - - protected abstract void print_hunk(Diff.change hunk); - - protected void print_1_line(String pre,Object linbuf) { - outfile.println(pre + linbuf.toString()); - } - - /** Print a pair of line numbers with SEPCHAR, translated for file FILE. - If the two numbers are identical, print just one number. - - Args A and B are internal line numbers. - We print the translated (real) line numbers. */ - - protected void print_number_range (char sepchar, int a, int b) { - /* Note: we can have B < A in the case of a range of no lines. - In this case, we should print the line number before the range, - which is B. */ - if (++b > ++a) - outfile.print("" + a + sepchar + b); - else - outfile.print(b); - } - - public static char change_letter(int inserts, int deletes) { - if (inserts == 0) - return 'd'; - else if (deletes == 0) - return 'a'; - else - return 'c'; - } - } - - /** Print a change list in the standard diff format. - */ - public static class NormalPrint extends Base { - - public NormalPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - /** Print a hunk of a normal diff. - This is a contiguous portion of a complete edit script, - describing changes in consecutive lines. */ - - protected void print_hunk (Diff.change hunk) { - - /* Determine range of line numbers involved in each file. */ - analyze_hunk(hunk); - if (deletes == 0 && inserts == 0) - return; - - /* Print out the line number header for this hunk */ - print_number_range (',', first0, last0); - outfile.print(change_letter(inserts, deletes)); - print_number_range (',', first1, last1); - outfile.println(); - - /* Print the lines that the first file has. */ - if (deletes != 0) - for (int i = first0; i <= last0; i++) - print_1_line ("< ", file0[i]); - - if (inserts != 0 && deletes != 0) - outfile.println("---"); - - /* Print the lines that the second file has. */ - if (inserts != 0) - for (int i = first1; i <= last1; i++) - print_1_line ("> ", file1[i]); - } - } - - /** Prints an edit script in a format suitable for input to ed. - The edit script must be generated with the reverse option to - be useful as actual ed input. - */ - public static class EdPrint extends Base { - - public EdPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - /** Print a hunk of an ed diff */ - protected void print_hunk(Diff.change hunk) { - - /* Determine range of line numbers involved in each file. */ - analyze_hunk (hunk); - if (deletes == 0 && inserts == 0) - return; - - /* Print out the line number header for this hunk */ - print_number_range (',', first0, last0); - outfile.println(change_letter(inserts, deletes)); - - /* Print new/changed lines from second file, if needed */ - if (inserts != 0) - { - boolean inserting = true; - for (int i = first1; i <= last1; i++) - { - /* Resume the insert, if we stopped. */ - if (! inserting) - outfile.println(i - first1 + first0 + "a"); - inserting = true; - - /* If the file's line is just a dot, it would confuse `ed'. - So output it with a double dot, and set the flag LEADING_DOT - so that we will output another ed-command later - to change the double dot into a single dot. */ - - if (".".equals(file1[i])) - { - outfile.println(".."); - outfile.println("."); - /* Now change that double dot to the desired single dot. 
*/ - outfile.println(i - first1 + first0 + 1 + "s/^\\.\\././"); - inserting = false; - } - else - /* Line is not `.', so output it unmodified. */ - print_1_line ("", file1[i]); - } - - /* End insert mode, if we are still in it. */ - if (inserting) - outfile.println("."); - } - } - } - - /** Prints an edit script in context diff format. This and its - 'unified' variation is used for source code patches. - */ - public static class ContextPrint extends Base { - - protected int context = 3; - - public ContextPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - protected void print_context_label (String mark, File inf, String label) { - if (label != null) - outfile.println(mark + ' ' + label); - else if (inf.lastModified() > 0) - // FIXME: use DateFormat to get precise format needed. - outfile.println( - mark + ' ' + inf.getPath() + '\t' + new Date(inf.lastModified()) - ); - else - /* Don't pretend that standard input is ancient. */ - outfile.println(mark + ' ' + inf.getPath()); - } - - public void print_header(String filea,String fileb) { - print_context_label ("***", new File(filea), filea); - print_context_label ("---", new File(fileb), fileb); - } - - /** If function_regexp defined, search for start of function. */ - private String find_function(Object[] lines, int start) { - return null; - } - - protected void print_function(Object[] file,int start) { - String function = find_function (file0, first0); - if (function != null) { - outfile.print(" "); - outfile.print( - (function.length() < 40) ? function : function.substring(0,40) - ); - } - } - - protected void print_hunk(Diff.change hunk) { - - /* Determine range of line numbers involved in each file. */ - - analyze_hunk (hunk); - - if (deletes == 0 && inserts == 0) - return; - - /* Include a context's width before and after. */ - - first0 = Math.max(first0 - context, 0); - first1 = Math.max(first1 - context, 0); - last0 = Math.min(last0 + context, file0.length - 1); - last1 = Math.min(last1 + context, file1.length - 1); - - - outfile.print("***************"); - - /* If we looked for and found a function this is part of, - include its name in the header of the diff section. */ - print_function (file0, first0); - - outfile.println(); - outfile.print("*** "); - print_number_range (',', first0, last0); - outfile.println(" ****"); - - if (deletes != 0) { - Diff.change next = hunk; - - for (int i = first0; i <= last0; i++) { - /* Skip past changes that apply (in file 0) - only to lines before line I. */ - - while (next != null && next.line0 + next.deleted <= i) - next = next.link; - - /* Compute the marking for line I. */ - - String prefix = " "; - if (next != null && next.line0 <= i) - /* The change NEXT covers this line. - If lines were inserted here in file 1, this is "changed". - Otherwise it is "deleted". */ - prefix = (next.inserted > 0) ? "!" : "-"; - - print_1_line (prefix, file0[i]); - } - } - - outfile.print("--- "); - print_number_range (',', first1, last1); - outfile.println(" ----"); - - if (inserts != 0) { - Diff.change next = hunk; - - for (int i = first1; i <= last1; i++) { - /* Skip past changes that apply (in file 1) - only to lines before line I. */ - - while (next != null && next.line1 + next.inserted <= i) - next = next.link; - - /* Compute the marking for line I. */ - - String prefix = " "; - if (next != null && next.line1 <= i) - /* The change NEXT covers this line. - If lines were deleted here in file 0, this is "changed". - Otherwise it is "inserted". */ - prefix = (next.deleted > 0) ? "!" 
: "+"; - - print_1_line (prefix, file1[i]); - } - } - } - } - - /** Prints an edit script in context diff format. This and its - 'unified' variation is used for source code patches. - */ - public static class UnifiedPrint extends ContextPrint { - - public UnifiedPrint(Object[] a,Object[] b, Writer w) { - super(a,b,w); - } - - public void print_header(String filea,String fileb) { - print_context_label ("---", new File(filea), filea); - print_context_label ("+++", new File(fileb), fileb); - } - - private void print_number_range (int a, int b) { - //translate_range (file, a, b, &trans_a, &trans_b); - - /* Note: we can have B < A in the case of a range of no lines. - In this case, we should print the line number before the range, - which is B. */ - if (b < a) - outfile.print(b + ",0"); - else - super.print_number_range(',',a,b); - } - - protected void print_hunk(Diff.change hunk) { - /* Determine range of line numbers involved in each file. */ - analyze_hunk (hunk); - - if (deletes == 0 && inserts == 0) - return; - - /* Include a context's width before and after. */ - - first0 = Math.max(first0 - context, 0); - first1 = Math.max(first1 - context, 0); - last0 = Math.min(last0 + context, file0.length - 1); - last1 = Math.min(last1 + context, file1.length - 1); - - - - outfile.print("@@ -"); - print_number_range (first0, last0); - outfile.print(" +"); - print_number_range (first1, last1); - outfile.print(" @@"); - - /* If we looked for and found a function this is part of, - include its name in the header of the diff section. */ - print_function(file0,first0); - - outfile.println(); - - Diff.change next = hunk; - int i = first0; - int j = first1; - - while (i <= last0 || j <= last1) { - - /* If the line isn't a difference, output the context from file 0. */ - - if (next == null || i < next.line0) { - outfile.print(' '); - print_1_line ("", file0[i++]); - j++; - } - else { - /* For each difference, first output the deleted part. */ - - int k = next.deleted; - while (k-- > 0) { - outfile.print('-'); - print_1_line ("", file0[i++]); - } - - /* Then output the inserted part. */ - - k = next.inserted; - while (k-- > 0) { - outfile.print('+'); - print_1_line ("", file1[j++]); - } - - /* We're done with this hunk, so on to the next! */ - - next = next.link; - } - } - } - } - - - /** Read a text file into an array of String. This provides basic diff - functionality. A more advanced diff utility will use specialized - objects to represent the text lines, with options to, for example, - convert sequences of whitespace to a single space for comparison - purposes. 
- */ - static String[] slurp(String file) throws IOException { - BufferedReader rdr = new BufferedReader(new FileReader(file)); - Vector s = new Vector(); - for (;;) { - String line = rdr.readLine(); - if (line == null) break; - s.addElement(line); - } - String[] a = new String[s.size()]; - s.copyInto(a); - return a; - } - - public static void main(String[] argv) throws IOException { - String filea = argv[argv.length - 2]; - String fileb = argv[argv.length - 1]; - String[] a = slurp(filea); - String[] b = slurp(fileb); - Diff d = new Diff(a,b); - char style = 'n'; - for (int i = 0; i < argv.length - 2; ++i) { - String f = argv[i]; - if (f.startsWith("-")) { - for (int j = 1; j < f.length(); ++j) { - switch (f.charAt(j)) { - case 'e': // Ed style - style = 'e'; break; - case 'c': // Context diff - style = 'c'; break; - case 'u': - style = 'u'; break; - } - } - } - } - boolean reverse = style == 'e'; - Diff.change script = d.diff_2(reverse); - if (script == null) - System.err.println("No differences"); - else { - Base p; - Writer w = new OutputStreamWriter(System.out); - switch (style) { - case 'e': - p = new EdPrint(a,b,w); break; - case 'c': - p = new ContextPrint(a,b,w); break; - case 'u': - p = new UnifiedPrint(a,b,w); break; - default: - p = new NormalPrint(a,b,w); - } - p.print_header(filea,fileb); - p.print_script(script); - } - } - - public static void doDiff(String[] argv, Writer w) throws IOException { - String filea = argv[argv.length - 2]; - String fileb = argv[argv.length - 1]; - String[] a = slurp(filea); - String[] b = slurp(fileb); - Diff d = new Diff(a,b); - char style = 'n'; - for (int i = 0; i < argv.length - 2; ++i) { - String f = argv[i]; - if (f.startsWith("-")) { - for (int j = 1; j < f.length(); ++j) { - switch (f.charAt(j)) { - case 'e': // Ed style - style = 'e'; break; - case 'c': // Context diff - style = 'c'; break; - case 'u': - style = 'u'; break; - } - } - } - } - boolean reverse = style == 'e'; - Diff.change script = d.diff_2(reverse); - if (script == null) - w.write("No differences\n"); - else { - Base p; - switch (style) { - case 'e': - p = new EdPrint(a,b,w); break; - case 'c': - p = new ContextPrint(a,b,w); break; - case 'u': - p = new UnifiedPrint(a,b,w); break; - default: - p = new NormalPrint(a,b,w); - } - p.print_header(filea,fileb); - p.print_script(script); - } - } - -} diff --git a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala b/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala deleted file mode 100644 index ddb3bc23fd..0000000000 --- a/src/partest-alternative/scala/tools/partest/io/JUnitReport.scala +++ /dev/null @@ -1,38 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest -package io - -/** This is disabled for the moment but I can fix it up if anyone - * is using it. 
- */ -class JUnitReport { - // create JUnit Report xml files if directory was specified - // def junitReport(dir: Directory) = { - // dir.mkdir() - // val report = testReport(set.kind, results, succs, fails) - // XML.save("%s/%s.xml".format(d.toAbsolute.path, set.kind), report) - // } - - // def oneResult(res: (TestEntity, Int)) = - // { - // res._2 match { - // case 0 => scala.xml.NodeSeq.Empty - // case 1 => - // case 2 => - // } - // } - // - // def testReport(kind: String, results: Iterable[(TestEntity, Int)], succs: Int, fails: Int) = { - // - // - // { - // results.map(oneResult(_)) - // } - // - // } - // -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/io/Logging.scala b/src/partest-alternative/scala/tools/partest/io/Logging.scala deleted file mode 100644 index 52239ffb2c..0000000000 --- a/src/partest-alternative/scala/tools/partest/io/Logging.scala +++ /dev/null @@ -1,137 +0,0 @@ -package scala.tools -package partest -package io - -import java.io.{ StringWriter, PrintWriter, Writer } -import scala.tools.nsc.io._ -import scala.util.control.ControlThrowable - -trait Logging { - universe: Universe => - - class PartestANSIWriter extends ANSIWriter(Console.out) { - override def colorful: Int = ANSIWriter(universe.isAnsi) - private def printIf(cond: Boolean, msg: String) = - if (cond) { outline("debug: ") ; println(msg) } - - val verbose = printIf(isVerbose || isDebug, _: String) - val debug = printIf(isDebug, _: String) - } - - lazy val NestUI = new PartestANSIWriter() - - import NestUI.{ _outline, _success, _failure, _warning, _default } - - def markOutline(msg: String) = _outline + msg + _default - def markSuccess(msg: String) = _success + msg + _default - def markFailure(msg: String) = _failure + msg + _default - def markWarning(msg: String) = _warning + msg + _default - def markNormal(msg: String) = _default + msg - - def outline(msg: String) = NestUI outline msg - def success(msg: String) = NestUI success msg - def failure(msg: String) = NestUI failure msg - def warning(msg: String) = NestUI warning msg - def normal(msg: String) = NestUI normal msg - - def verbose(msg: String) = NestUI verbose msg - def debug(msg: String) = NestUI debug msg - - trait EntityLogging { - self: TestEntity => - - lazy val logWriter = new LogWriter(logFile) - - /** Redirect stdout and stderr to logFile, run body, return result. - */ - def loggingOutAndErr[T](body: => T): T = { - val log = logFile.printStream(append = true) - - try Console.withOut(log) { - Console.withErr(log) { - body - } - } - finally log.close() - } - - /** What to print in a failure summary. - */ - def failureMessage() = if (diffOutput != "") diffOutput else safeSlurp(logFile) - - /** For tracing. Outputs a line describing the next action. tracePath - * is a path wrapper which prints name or full path depending on verbosity. - */ - def trace(msg: String) = if (isTrace || isDryRun) System.err.println(">> [%s] %s".format(label, msg)) - - def tracePath(path: Path): String = if (isVerbose) path.path else path.name - def tracePath(path: String): String = tracePath(Path(path)) - - /** v == verbose. - */ - def vtrace(msg: String) = if (isVerbose) trace(msg) - - /** Run body, writes result to logFile. Any throwable is - * caught, stringified, and written to the log. 
- */ - def loggingResult(body: => String) = - try returning(true)(_ => logFile writeAll body) - catch { - case x: ControlThrowable => throw x - case x: InterruptedException => debug(this + " received interrupt, failing.\n") ; false - case x: Throwable => logException(x) - } - - def throwableToString(x: Throwable): String = { - val w = new StringWriter - x.printStackTrace(new PrintWriter(w)) - w.toString - } - - def warnAndLog(str: String) = { - warning(toStringTrunc(str, 800)) - logWriter append str - } - - def warnAndLogException(msg: String, ex: Throwable) = - warnAndLog(msg + throwableToString(ex)) - - def deleteLog(force: Boolean = false) = - if (universe.isNoCleanup && !force) debug("Not cleaning up " + logFile) - else logFile.deleteIfExists() - - def onException(x: Throwable) { logException(x) } - def logException(x: Throwable) = { - val msg = throwableToString(x) - if (!isTerse) - normal(msg) - - logWriter append msg - false - } - } - - /** A writer which doesn't create the file until a write comes in. - */ - class LazilyCreatedWriter(log: File) extends Writer { - @volatile private var isCreated = false - private lazy val underlying = { - isCreated = true - log.bufferedWriter() - } - - def flush() = if (isCreated) underlying.flush() - def close() = if (isCreated) underlying.close() - def write(chars: Array[Char], off: Int, len: Int) = { - underlying.write(chars, off, len) - underlying.flush() - } - } - - class LogWriter(log: File) extends PrintWriter(new LazilyCreatedWriter(log), true) { - override def print(s: String) = { - super.print(s) - flush() - } - } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/package.scala b/src/partest-alternative/scala/tools/partest/package.scala deleted file mode 100644 index 9c515aa2f4..0000000000 --- a/src/partest-alternative/scala/tools/partest/package.scala +++ /dev/null @@ -1,45 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools - -import nsc.io.{ File, Path, Process, Directory } -import java.nio.charset.CharacterCodingException - -package object partest { - /** The CharacterCodingExceptions are thrown at least on windows trying - * to read a file like script/utf-8.scala - */ - private[partest] def safeSlurp(f: File) = - try if (f.exists) f.slurp() else "" - catch { case _: CharacterCodingException => "" } - - private[partest] def safeLines(f: File) = safeSlurp(f) split """\r\n|\r|\n""" toList - private[partest] def safeArgs(f: File) = toArgs(safeSlurp(f)) - private[partest] def isJava(f: Path) = f.isFile && (f hasExtension "java") - private[partest] def isScala(f: Path) = f.isFile && (f hasExtension "scala") - private[partest] def isJavaOrScala(f: Path) = isJava(f) || isScala(f) - - private[partest] def toArgs(line: String) = cmd toArgs line - private[partest] def fromArgs(args: List[String]) = cmd fromArgs args - - /** Strings, argument lists, etc. */ - - private[partest] def fromAnyArgs(args: List[Any]) = args mkString " " // separate to avoid accidents - private[partest] def toStringTrunc(x: Any, max: Int = 240) = { - val s = x.toString - if (s.length < max) s - else (s take max) + " [...]" - } - private[partest] def setProp(k: String, v: String) = scala.util.Properties.setProp(k, v) - - /** Pretty self explanatory. 
*/ - def printAndExit(msg: String): Unit = { - println(msg) - exit(1) - } - - /** Apply a function and return the passed value */ - def returning[T](x: T)(f: T => Unit): T = { f(x) ; x } -} \ No newline at end of file diff --git a/src/partest-alternative/scala/tools/partest/util/package.scala b/src/partest-alternative/scala/tools/partest/util/package.scala deleted file mode 100644 index c34d641db1..0000000000 --- a/src/partest-alternative/scala/tools/partest/util/package.scala +++ /dev/null @@ -1,61 +0,0 @@ -/* NEST (New Scala Test) - * Copyright 2007-2011 LAMP/EPFL - */ - -package scala.tools -package partest - -import java.util.{ Timer, TimerTask } -import java.io.StringWriter -import nsc.io._ - -/** Misc code still looking for a good home. - */ -package object util { - - def allPropertiesString() = javaHashtableToString(System.getProperties) - - private def javaHashtableToString(table: java.util.Hashtable[_,_]) = { - import collection.JavaConversions._ - (table.toList map { case (k, v) => "%s -> %s\n".format(k, v) }).sorted mkString - } - - def filesToSet(pre: String, fs: List[String]): Set[AbstractFile] = - fs flatMap (x => Option(AbstractFile getFile (Path(pre) / x).path)) toSet - - /** Copies one Path to another Path, trying to be sensible when one or the - * other is a Directory. Returns true if it believes it succeeded. - */ - def copyPath(from: Path, to: Path): Boolean = { - if (!to.parent.isDirectory) - to.parent.createDirectory(force = true) - - def copyDir = { - val sub = to / from.name createDirectory true - from.toDirectory.list forall (x => copyPath(x, sub)) - } - (from.isDirectory, to.isDirectory) match { - case (true, true) => copyDir - case (true, false) => false - case (false, true) => from.toFile copyTo (to / from.name) - case (false, false) => from.toFile copyTo to - } - } - - /** - * Compares two files using a Java implementation of the GNU diff - * available at http://www.bmsi.com/java/#diff. - * - * @param f1 the first file to be compared - * @param f2 the second file to be compared - * @return the text difference between the compared files - */ - def diffFiles(f1: File, f2: File): String = { - val diffWriter = new StringWriter - val args = Array(f1.toAbsolute.path, f2.toAbsolute.path) - - io.DiffPrint.doDiff(args, diffWriter) - val result = diffWriter.toString - if (result == "No differences") "" else result - } -} -- cgit v1.2.3 From a7aeddd038bead9cda67a85a922a7988b459ff04 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 26 Jan 2012 06:58:17 -0800 Subject: Fix for recently induced -optimise crasher. "Induced" but not in my estimation "caused". Would like to understand why the enclosed test case crashes under -optimise without this change to AddInterfaces. 
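[Editor's note, not part of the original commit message.] The fix below does two things: it stops forcing impl.info in AddInterfaces (leaving a comment with the open question), and it enriches the assertion in ClassfileParser that used to fire, so the next crash report names the classfile and symbols involved. A minimal, self-contained sketch of that second pattern follows; ParseContext and resolveInner are illustrative names, not compiler API.

object AssertWithContextSketch {
  // Carry enough state to say *what* was being processed when an assertion trips.
  final case class ParseContext(classfile: String, outer: String, inner: String)

  def resolveInner(ctx: ParseContext, resolved: Option[String]): String = {
    assert(resolved.isDefined,
      "could not resolve " + ctx.outer + "." + ctx.inner + " while parsing " + ctx.classfile)
    resolved.get
  }

  def main(args: Array[String]): Unit =
    println(resolveInner(ParseContext("Foo$Bar.class", "Foo", "Bar"), Some("Foo.Bar")))
}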
--- .../tools/nsc/symtab/classfile/ClassfileParser.scala | 7 ++++++- .../scala/tools/nsc/transform/AddInterfaces.scala | 10 ++++++++-- test/files/pos/trait-force-info.flags | 1 + test/files/pos/trait-force-info.scala | 18 ++++++++++++++++++ 4 files changed, 33 insertions(+), 3 deletions(-) create mode 100644 test/files/pos/trait-force-info.flags create mode 100644 test/files/pos/trait-force-info.scala diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 811bb6ee05..5e0fcb4bdc 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1208,7 +1208,12 @@ abstract class ClassfileParser { atPhase(currentRun.typerPhase)(getMember(sym, innerName.toTypeName)) else getMember(sym, innerName.toTypeName) - assert(s ne NoSymbol, sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members) + + assert(s ne NoSymbol, + "" + ((externalName, outerName, innerName, sym.fullLocationString)) + " / " + + " while parsing " + ((in.file, busy)) + + sym + "." + innerName + " linkedModule: " + sym.companionModule + sym.companionModule.info.members + ) s case None => diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index 1c41e68532..b4e1956cf4 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -84,8 +84,14 @@ abstract class AddInterfaces extends InfoTransform { atPhase(implClassPhase) { log("%s.implClass == %s".format(iface, iface.implClass)) val implName = nme.implClassName(iface.name) - var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol - impl.info + var impl = if (iface.owner.isClass) iface.owner.info.decl(implName) else NoSymbol + + // !!! Why does forcing the impl's info here lead to a crash? + // See test case pos/trait-force-info.scala for a minimization. + // It crashes like this: + // + // [log lazyvals] trait ContextTrees.implClass == class ContextTrees$class + // error: java.lang.AssertionError: assertion failed: (scala.tools.nsc.typechecker.Contexts$NoContext$,scala.tools.nsc.typechecker.Contexts,NoContext$,trait Contexts in package typechecker) / while parsing (/scala/trunk/build/pack/lib/scala-compiler.jar(scala/tools/nsc/interactive/ContextTrees$class.class),Some(class ContextTrees$class))trait Contexts.NoContext$ linkedModule: List() val originalImpl = impl val originalImplString = originalImpl.hasFlagsToString(-1L) diff --git a/test/files/pos/trait-force-info.flags b/test/files/pos/trait-force-info.flags new file mode 100644 index 0000000000..eb4d19bcb9 --- /dev/null +++ b/test/files/pos/trait-force-info.flags @@ -0,0 +1 @@ +-optimise \ No newline at end of file diff --git a/test/files/pos/trait-force-info.scala b/test/files/pos/trait-force-info.scala new file mode 100644 index 0000000000..e01d225c84 --- /dev/null +++ b/test/files/pos/trait-force-info.scala @@ -0,0 +1,18 @@ +/** This does NOT crash unless it's in the interactive package. 
+ */ + +package scala.tools.nsc +package interactive + +trait MyContextTrees { + val self: Global + val NoContext = self.analyzer.NoContext +} +// +// error: java.lang.AssertionError: assertion failed: trait Contexts.NoContext$ linkedModule: List() +// at scala.Predef$.assert(Predef.scala:160) +// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.innerSymbol$1(ClassfileParser.scala:1211) +// at scala.tools.nsc.symtab.classfile.ClassfileParser$innerClasses$.classSymbol(ClassfileParser.scala:1223) +// at scala.tools.nsc.symtab.classfile.ClassfileParser.classNameToSymbol(ClassfileParser.scala:489) +// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:757) +// at scala.tools.nsc.symtab.classfile.ClassfileParser.sig2type$1(ClassfileParser.scala:789) -- cgit v1.2.3 From c608620531dcb47da43172c53891321c7beb98b0 Mon Sep 17 00:00:00 2001 From: aleksandar Date: Thu, 26 Jan 2012 16:38:54 +0100 Subject: Set fields in immutable hash maps and hash sets to vals. This is part of an effort to make the immutable collections (more) thread safe. The `::` still has non-final member fields for head and tail, but there is not much that can be done right now about that, since these fields are used by list buffers. Tried writing a test with unsafe initialization, but could not invent a scenario which actually fails, at least on the JDK6. --- .../scala/collection/immutable/HashMap.scala | 11 ++++---- .../scala/collection/immutable/HashSet.scala | 6 ++--- .../collection/parallel/immutable/ParHashMap.scala | 19 +++++++++----- .../ParallelIterableCheck.scala | 30 +++++++++++----------- 4 files changed, 37 insertions(+), 29 deletions(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 55ce8fa822..9cde20f1df 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -111,7 +111,7 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { // TODO: add HashMap2, HashMap3, ... 
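// Editor's note (not part of the patch): a self-contained sketch of the memory-model
// reasoning in the commit message above. The point of moving these fields from var to
// val is to make them final at the JVM level: the Java memory model guarantees that a
// reader which obtains a reference to an object sees its final fields fully initialized,
// even when that reference was published through a data race (provided the reference
// does not escape the constructor). Mutable fields carry no such guarantee. As the
// commit message notes, an actual failure is hard to provoke on stock JVMs, so this
// only shows the two field shapes involved; names are illustrative only.
object SafePublicationSketch {
  final class WithVals(val key: Int, val hash: Int) // vals: meant to be final fields, safe under racy publication
  final class WithVars(var key: Int, var hash: Int) // vars: a racy reader may observe default values

  def main(args: Array[String]): Unit = {
    val ok    = new WithVals(1, 42)
    val risky = new WithVars(1, 42)
    println(ok.key + ok.hash + risky.key + risky.hash)
  }
}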
- class HashMap1[A,+B](private[HashMap] var key: A, private[HashMap] var hash: Int, private[collection] var value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] { + class HashMap1[A,+B](private[collection] val key: A, private[collection] val hash: Int, private[collection] val value: (B @uV), private[collection] var kv: (A,B @uV)) extends HashMap[A,B] { override def size = 1 private[collection] def getKey = key @@ -176,13 +176,14 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { override def iterator: Iterator[(A,B)] = Iterator(ensurePair) override def foreach[U](f: ((A, B)) => U): Unit = f(ensurePair) + // this method may be called multiple times in a multithreaded environment, but that's ok private[HashMap] def ensurePair: (A,B) = if (kv ne null) kv else { kv = (key, value); kv } protected override def merge0[B1 >: B](that: HashMap[A, B1], level: Int, merger: Merger[B1]): HashMap[A, B1] = { that.updated0(key, hash, level, value, kv, merger) } } - private[collection] class HashMapCollision1[A, +B](private[HashMap] var hash: Int, var kvs: ListMap[A, B @uV]) + private[collection] class HashMapCollision1[A, +B](private[collection] val hash: Int, val kvs: ListMap[A, B @uV]) extends HashMap[A, B @uV] { override def size = kvs.size @@ -227,9 +228,9 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { } class HashTrieMap[A, +B]( - private[HashMap] var bitmap: Int, - private[collection] var elems: Array[HashMap[A, B @uV]], - private[HashMap] var size0: Int + private[collection] val bitmap: Int, + private[collection] val elems: Array[HashMap[A, B @uV]], + private[collection] val size0: Int ) extends HashMap[A, B @uV] { /* diff --git a/src/library/scala/collection/immutable/HashSet.scala b/src/library/scala/collection/immutable/HashSet.scala index 8cb19d4f31..79d2fb71cc 100644 --- a/src/library/scala/collection/immutable/HashSet.scala +++ b/src/library/scala/collection/immutable/HashSet.scala @@ -105,7 +105,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { // TODO: add HashSet2, HashSet3, ... 
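// Editor's note (not part of the patch): the "may be called multiple times in a
// multithreaded environment, but that's ok" comment on ensurePair above is the classic
// benign-race caching idiom (compare java.lang.String.hashCode): every thread computes
// the same result, so the worst case is redundant construction, and because the cached
// pair's fields are vals (final), a racily published pair is never seen half-built.
// An illustrative stand-alone version with hypothetical names, reading the field once
// into a local so the value returned is always the one that was checked:
final class CachedPairSketch(key: Int, value: String) {
  private[this] var cached: (Int, String) = null
  def pair: (Int, String) = {
    var p = cached
    if (p eq null) { p = (key, value); cached = p } // racy write, but idempotent
    p
  }
}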
- class HashSet1[A](private[HashSet] var key: A, private[HashSet] var hash: Int) extends HashSet[A] { + class HashSet1[A](private[HashSet] val key: A, private[HashSet] val hash: Int) extends HashSet[A] { override def size = 1 override def get0(key: A, hash: Int, level: Int): Boolean = @@ -131,7 +131,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { override def foreach[U](f: A => U): Unit = f(key) } - private[immutable] class HashSetCollision1[A](private[HashSet] var hash: Int, var ks: ListSet[A]) + private[immutable] class HashSetCollision1[A](private[HashSet] val hash: Int, val ks: ListSet[A]) extends HashSet[A] { override def size = ks.size @@ -178,7 +178,7 @@ object HashSet extends ImmutableSetFactory[HashSet] { } - class HashTrieSet[A](private var bitmap: Int, private[collection] var elems: Array[HashSet[A]], private var size0: Int) + class HashTrieSet[A](private val bitmap: Int, private[collection] val elems: Array[HashSet[A]], private val size0: Int) extends HashSet[A] { override def size = size0 diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index 1fec522a93..e785932933 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -304,14 +304,21 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], (K, V), Has evaluateCombiners(trie) trie.asInstanceOf[HashMap[K, Repr]] } - private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): Unit = trie match { + private def evaluateCombiners(trie: HashMap[K, Combiner[V, Repr]]): HashMap[K, Repr] = trie match { case hm1: HashMap.HashMap1[_, _] => - hm1.asInstanceOf[HashMap.HashMap1[K, Repr]].value = hm1.value.result - hm1.kv = null + val evaledvalue = hm1.value.result + new HashMap.HashMap1[K, Repr](hm1.key, hm1.hash, evaledvalue, null) case hmc: HashMap.HashMapCollision1[_, _] => - hmc.asInstanceOf[HashMap.HashMapCollision1[K, Repr]].kvs = hmc.kvs map { p => (p._1, p._2.result) } - case htm: HashMap.HashTrieMap[_, _] => - for (hm <- htm.elems) evaluateCombiners(hm) + val evaledkvs = hmc.kvs map { p => (p._1, p._2.result) } + new HashMap.HashMapCollision1[K, Repr](hmc.hash, evaledkvs) + case htm: HashMap.HashTrieMap[k, v] => + var i = 0 + while (i < htm.elems.length) { + htm.elems(i) = evaluateCombiners(htm.elems(i)).asInstanceOf[HashMap[k, v]] + i += 1 + } + htm.asInstanceOf[HashMap[K, Repr]] + case empty => empty.asInstanceOf[HashMap[K, Repr]] } def split = { val fp = howmany / 2 diff --git a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala index fbacb9f45c..8273e302a2 100644 --- a/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala +++ b/test/files/scalacheck/parallel-collections/ParallelIterableCheck.scala @@ -414,21 +414,21 @@ abstract class ParallelIterableCheck[T](collName: String) extends Properties(col }).reduceLeft(_ && _) } - // property("groupBy must be equal") = forAll(collectionPairs) { - // case (t, coll) => - // (for ((f, ind) <- groupByFunctions.zipWithIndex) yield { - // val tgroup = t.groupBy(f) - // val cgroup = coll.groupBy(f) - // if (tgroup != cgroup || cgroup != tgroup) { - // println("from: " + t) - // println("and: " + coll) - // println("groups are: ") - // println(tgroup) - // println(cgroup) - // } - // ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup - // 
}).reduceLeft(_ && _) - // } + property("groupBy must be equal") = forAll(collectionPairs) { + case (t, coll) => + (for ((f, ind) <- groupByFunctions.zipWithIndex) yield { + val tgroup = t.groupBy(f) + val cgroup = coll.groupBy(f) + if (tgroup != cgroup || cgroup != tgroup) { + println("from: " + t) + println("and: " + coll) + println("groups are: ") + println(tgroup) + println(cgroup) + } + ("operator " + ind) |: tgroup == cgroup && cgroup == tgroup + }).reduceLeft(_ && _) + } } -- cgit v1.2.3 From 67420a8fa31c4570542450238b97e5ca1b54a86f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 26 Jan 2012 08:40:04 -0800 Subject: Intercept assert and require calls. And abort calls, and unhandled exceptions, all so I can supplement the error message with a little of the vast quantity of useful information which we possess but do not reveal. "Details are sketchy," says the officer tasked with investigating the crash, but no longer. Also took the opportunity to eliminate a bunch of one-argument assertions and requirements if I thought there was any chance I'd someday be facing them on the wrong end of an incident. Have you ever dreamed that instead of this experience: % scalac -optimise error: java.lang.AssertionError: assertion failed: Record Record(anonymous class JavaToScala$$anonfun$makeScalaPackage$1,Map()) does not contain a field value owner$1 Things could proceed more like this: % scalac -optimise error: while compiling: src/compiler/scala/reflect/runtime/JavaToScala.scala current phase: closelim library version: version 2.10.0.rdev-4267-2012-01-25-gc94d342 compiler version: version 2.10.0.rdev-4270-2012-01-26-gd540ddf reconstructed args: -Ydead-code -optimise -Yinline -Yclosure-elim -Yinline-handlers -d /tmp error: java.lang.AssertionError: [etc] You are not dreaming! 
IT'S ALL HAPPENING --- .../scala/reflect/internal/SymbolTable.scala | 7 +- src/compiler/scala/tools/nsc/Global.scala | 122 ++++++++++++++++----- src/compiler/scala/tools/nsc/ast/Trees.scala | 2 +- .../tools/nsc/ast/parser/SyntaxAnalyzer.scala | 2 +- .../scala/tools/nsc/backend/icode/GenICode.scala | 6 +- .../backend/icode/analysis/CopyPropagation.scala | 2 +- src/compiler/scala/tools/nsc/io/AbstractFile.scala | 2 +- .../nsc/symtab/classfile/ClassfileParser.scala | 18 +-- .../tools/nsc/symtab/classfile/ICodeReader.scala | 6 +- .../scala/tools/nsc/transform/AddInterfaces.scala | 2 +- .../scala/tools/nsc/transform/ExplicitOuter.scala | 4 +- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../scala/tools/nsc/typechecker/Contexts.scala | 2 +- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 6 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- .../tools/nsc/typechecker/SuperAccessors.scala | 2 +- .../scala/tools/nsc/typechecker/TreeCheckers.scala | 4 +- .../tools/nsc/typechecker/TypeDiagnostics.scala | 2 - .../scala/tools/nsc/typechecker/Typers.scala | 18 +-- .../scala/tools/nsc/typechecker/Unapplies.scala | 2 +- src/compiler/scala/tools/nsc/util/SourceFile.scala | 4 +- 21 files changed, 143 insertions(+), 74 deletions(-) diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala index 717693fa1f..fb827b0658 100644 --- a/src/compiler/scala/reflect/internal/SymbolTable.scala +++ b/src/compiler/scala/reflect/internal/SymbolTable.scala @@ -33,12 +33,17 @@ abstract class SymbolTable extends api.Universe { def rootLoader: LazyType def log(msg: => AnyRef): Unit - def abort(msg: String): Nothing = throw new FatalError(msg) + def abort(msg: String): Nothing = throw new FatalError(supplementErrorMessage(msg)) + + @deprecated("2.10.0", "Give us a reason") def abort(): Nothing = abort("unknown error") /** Override with final implementation for inlining. */ def debuglog(msg: => String): Unit = if (settings.debug.value) log(msg) def debugwarn(msg: => String): Unit = if (settings.debug.value) Console.err.println(msg) + + /** Overridden when we know more about what was happening during a failure. */ + def supplementErrorMessage(msg: String): String = msg private[scala] def printResult[T](msg: String)(result: T) = { Console.err.println(msg + ": " + result) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 426700f3b2..797ed7e047 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -13,7 +13,7 @@ import scala.tools.util.{ Profiling, PathResolver } import scala.collection.{ mutable, immutable } import io.{ SourceReader, AbstractFile, Path } import reporters.{ Reporter, ConsoleReporter } -import util.{ NoPosition, Exceptional, ClassPath, SourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ShowPickled, ScalaClassLoader, returning } +import util.{ NoPosition, Exceptional, ClassPath, SourceFile, NoSourceFile, Statistics, StatisticsInfo, BatchSourceFile, ScriptSourceFile, ShowPickled, ScalaClassLoader, returning } import scala.reflect.internal.pickling.{ PickleBuffer, PickleFormat } import settings.{ AestheticSettings } @@ -164,6 +164,23 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb if (opt.fatalWarnings) globalError(msg) else reporter.warning(NoPosition, msg) + // Getting in front of Predef's asserts to supplement with more info. 
+ // This has the happy side effect of masking the one argument forms + // of assert and require (but for now I've reproduced them here, + // because there are a million to fix.) + @inline final def assert(assertion: Boolean, message: => Any) { + Predef.assert(assertion, supplementErrorMessage("" + message)) + } + @inline final def assert(assertion: Boolean) { + assert(assertion, "") + } + @inline final def require(requirement: Boolean, message: => Any) { + Predef.require(requirement, supplementErrorMessage("" + message)) + } + @inline final def require(requirement: Boolean) { + require(requirement, "") + } + // Needs to call error to make sure the compile fails. override def abort(msg: String): Nothing = { error(msg) @@ -375,10 +392,13 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb } final def applyPhase(unit: CompilationUnit) { + if ((unit ne null) && unit.exists) + lastSeenSourceFile = unit.source + if (opt.echoFilenames) inform("[running phase " + name + " on " + unit + "]") - val unit0 = currentRun.currentUnit + val unit0 = currentUnit try { currentRun.currentUnit = unit if (!cancelled(unit)) { @@ -387,7 +407,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb } currentRun.advanceUnit } finally { - //assert(currentRun.currentUnit == unit) + //assert(currentUnit == unit) currentRun.currentUnit = unit0 } } @@ -781,9 +801,40 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb curRun = null } + /** There are common error conditions where when the exception hits + * here, currentRun.currentUnit is null. This robs us of the knowledge + * of what file was being compiled when it broke. Since I really + * really want to know, this hack. + */ + private var lastSeenSourceFile: SourceFile = NoSourceFile + /** The currently active run */ - def currentRun: Run = curRun + def currentRun: Run = curRun + def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit + def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile + + /** Don't want to introduce new errors trying to report errors, + * so swallow exceptions. + */ + override def supplementErrorMessage(errorMessage: String): String = try { + """| + | while compiling: %s + | current phase: %s + | library version: %s + | compiler version: %s + | reconstructed args: %s + | + |%s""".stripMargin.format( + currentSource.path, + phase, + scala.util.Properties.versionString, + Properties.versionString, + settings.recreateArgs.mkString(" "), + if (opt.debug) "Current unit body:\n" + currentUnit.body + "\n" + errorMessage else errorMessage + ) + } + catch { case x: Exception => errorMessage } /** The id of the currently active run */ @@ -798,10 +849,40 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb /** A Run is a single execution of the compiler on a sets of units */ class Run { + /** Have been running into too many init order issues with Run + * during erroneous conditions. Moved all these vals up to the + * top of the file so at least they're not trivially null. + */ var isDefined = false + /** The currently compiled unit; set from GlobalPhase */ + var currentUnit: CompilationUnit = NoCompilationUnit + + /** Counts for certain classes of warnings during this run. 
*/ + var deprecationWarnings: List[(Position, String)] = Nil + var uncheckedWarnings: List[(Position, String)] = Nil + + /** A flag whether macro expansions failed */ + var macroExpansionFailed = false + /** To be initialized from firstPhase. */ private var terminalPhase: Phase = NoPhase + private val unitbuf = new mutable.ListBuffer[CompilationUnit] + val compiledFiles = new mutable.HashSet[String] + + /** A map from compiled top-level symbols to their source files */ + val symSource = new mutable.HashMap[Symbol, AbstractFile] + + /** A map from compiled top-level symbols to their picklers */ + val symData = new mutable.HashMap[Symbol, PickleBuffer] + + private var phasec: Int = 0 // phases completed + private var unitc: Int = 0 // units completed this phase + private var _unitbufSize = 0 + + def size = _unitbufSize + override def toString = "scalac Run for:\n " + compiledFiles.toList.sorted.mkString("\n ") + // Calculate where to stop based on settings -Ystop-before or -Ystop-after. // Slightly complicated logic due to wanting -Ystop-before:parser to fail rather // than mysteriously running to completion. @@ -895,16 +976,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb // --------------- Miscellania ------------------------------- - /** The currently compiled unit; set from GlobalPhase */ - var currentUnit: CompilationUnit = _ - - /** Counts for certain classes of warnings during this run. */ - var deprecationWarnings: List[(Position, String)] = Nil - var uncheckedWarnings: List[(Position, String)] = Nil - - /** A flag whether macro expansions failed */ - var macroExpansionFailed = false - /** Progress tracking. Measured in "progress units" which are 1 per * compilation unit per phase completed. * @@ -936,9 +1007,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb } def cancel() { reporter.cancelled = true } - - private var phasec: Int = 0 // phases completed - private var unitc: Int = 0 // units completed this phase + private def currentProgress = (phasec * size) + unitc private def totalProgress = (phaseDescriptors.size - 1) * size // -1: drops terminal phase private def refreshProgress() = if (size > 0) progress(currentProgress, totalProgress) @@ -977,11 +1046,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb // ----------- Units and top-level classes and objects -------- - private val unitbuf = new mutable.ListBuffer[CompilationUnit] - val compiledFiles = new mutable.HashSet[String] - - private var _unitbufSize = 0 - def size = _unitbufSize /** add unit to be compiled in this run */ private def addUnit(unit: CompilationUnit) { @@ -1005,12 +1069,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb */ def units: Iterator[CompilationUnit] = unitbuf.iterator - /** A map from compiled top-level symbols to their source files */ - val symSource = new mutable.HashMap[Symbol, AbstractFile] - - /** A map from compiled top-level symbols to their picklers */ - val symData = new mutable.HashMap[Symbol, PickleBuffer] - def registerPickle(sym: Symbol): Unit = { // Convert all names to the type name: objects don't store pickled data if (opt.showPhase && (opt.showNames exists (x => findNamedMember(x.toTypeName, sym) != NoSymbol))) { @@ -1114,6 +1172,14 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb /** Compile list of units, starting with phase `fromPhase` */ def compileUnits(units: List[CompilationUnit], fromPhase: Phase) { + 
try compileUnitsInternal(units, fromPhase) + catch { case ex => + globalError(supplementErrorMessage("uncaught exception during compilation: " + ex.getClass.getName)) + throw ex + } + } + + private def compileUnitsInternal(units: List[CompilationUnit], fromPhase: Phase) { units foreach addUnit if (opt.profileAll) { inform("starting CPU profiling on compilation run") diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 3a2c5f61b2..c80b07c44d 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -223,7 +223,7 @@ trait Trees extends reflect.internal.Trees { self: Global => try unit.body = transform(unit.body) catch { case ex: Exception => - println("unhandled exception while transforming "+unit) + println(supplementErrorMessage("unhandled exception while transforming "+unit)) throw ex } } diff --git a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala index b5ec0ceffb..e310611e68 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/SyntaxAnalyzer.scala @@ -24,7 +24,7 @@ abstract class SyntaxAnalyzer extends SubComponent with Parsers with MarkupParse import global._ informProgress("parsing " + unit) unit.body = - if (unit.source.file.name.endsWith(".java")) new JavaUnitParser(unit).parse() + if (unit.isJava) new JavaUnitParser(unit).parse() else if (reporter.incompleteHandled) new UnitParser(unit).parse() else new UnitParser(unit).smartParse() diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index badf5d70d1..3d650ef753 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -179,7 +179,7 @@ abstract class GenICode extends SubComponent { } private def genThrow(expr: Tree, ctx: Context): (Context, TypeKind) = { - require(expr.tpe <:< ThrowableClass.tpe) + require(expr.tpe <:< ThrowableClass.tpe, expr.tpe) val thrownKind = toTypeKind(expr.tpe) val ctx1 = genLoad(expr, ctx, thrownKind) @@ -480,7 +480,7 @@ abstract class GenICode extends SubComponent { */ private def msil_genLoadZeroOfNonEnumValuetype(ctx: Context, kind: TypeKind, pos: Position, leaveAddressOnStackInstead: Boolean) { val REFERENCE(clssym) = kind - assert(loaders.clrTypes.isNonEnumValuetype(clssym)) + assert(loaders.clrTypes.isNonEnumValuetype(clssym), clssym) val local = ctx.makeLocal(pos, clssym.tpe, "tmp") ctx.method.addLocal(local) ctx.bb.emit(CIL_LOAD_LOCAL_ADDRESS(local), pos) @@ -1064,7 +1064,7 @@ abstract class GenICode extends SubComponent { var default: BasicBlock = afterCtx.bb for (caze @ CaseDef(pat, guard, body) <- cases) { - assert(guard == EmptyTree) + assert(guard == EmptyTree, guard) val tmpCtx = ctx1.newBlock pat match { case Literal(value) => diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala index 229bbceb36..f5be82a776 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/CopyPropagation.scala @@ -199,7 +199,7 @@ abstract class CopyPropagation { m foreachBlock { b => in(b) = lattice.bottom out(b) = lattice.bottom - assert(out.contains(b)) + assert(out.contains(b), out) log("Added point: " + b) } 
m.exh foreach { e => diff --git a/src/compiler/scala/tools/nsc/io/AbstractFile.scala b/src/compiler/scala/tools/nsc/io/AbstractFile.scala index 494eb4d50b..b51cf1228c 100644 --- a/src/compiler/scala/tools/nsc/io/AbstractFile.scala +++ b/src/compiler/scala/tools/nsc/io/AbstractFile.scala @@ -211,7 +211,7 @@ abstract class AbstractFile extends AnyRef with Iterable[AbstractFile] { var start = 0 while (true) { val index = path.indexOf(separator, start) - assert(index < 0 || start < index) + assert(index < 0 || start < index, ((path, directory, start, index))) val name = path.substring(start, if (index < 0) length else index) file = getFile(file, name, if (index < 0) directory else true) if ((file eq null) || index < 0) return file diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 5e0fcb4bdc..ac6dca4422 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -392,7 +392,7 @@ abstract class ClassfileParser { } def getBytes(indices: List[Int]): Array[Byte] = { - assert(!indices.isEmpty) + assert(!indices.isEmpty, indices) var value = values(indices.head).asInstanceOf[Array[Byte]] if (value eq null) { val bytesBuffer = ArrayBuffer.empty[Byte] @@ -679,7 +679,7 @@ abstract class ClassfileParser { var index = 0 val end = sig.length def accept(ch: Char) { - assert(sig(index) == ch) + assert(sig(index) == ch, (sig(index), ch)) index += 1 } def subName(isDelimiter: Char => Boolean): Name = { @@ -736,7 +736,7 @@ abstract class ClassfileParser { } } accept('>') - assert(xs.length > 0) + assert(xs.length > 0, tp) newExistentialType(existentials.toList, typeRef(pre, classSym, xs.toList)) } else if (classSym.isMonomorphicType) { tp @@ -750,7 +750,7 @@ abstract class ClassfileParser { res } case tp => - assert(sig(index) != '<') + assert(sig(index) != '<', tp) tp } @@ -776,7 +776,7 @@ abstract class ClassfileParser { appliedType(definitions.ArrayClass.tpe, List(elemtp)) case '(' => // we need a method symbol. given in line 486 by calling getType(methodSym, ..) 
- assert(sym ne null) + assert(sym ne null, sig) val paramtypes = new ListBuffer[Type]() while (sig(index) != ')') { paramtypes += objToAny(sig2type(tparams, skiptvs)) @@ -809,7 +809,7 @@ abstract class ClassfileParser { var tparams = classTParams val newTParams = new ListBuffer[Symbol]() if (sig(index) == '<') { - assert(sym != null) + assert(sym != null, sig) index += 1 val start = index while (sig(index) != '>') { @@ -974,18 +974,18 @@ abstract class ClassfileParser { def parseScalaSigBytes: Option[ScalaSigBytes] = { val tag = in.nextByte.toChar - assert(tag == STRING_TAG) + assert(tag == STRING_TAG, tag) Some(ScalaSigBytes(pool getBytes in.nextChar)) } def parseScalaLongSigBytes: Option[ScalaSigBytes] = { val tag = in.nextByte.toChar - assert(tag == ARRAY_TAG) + assert(tag == ARRAY_TAG, tag) val stringCount = in.nextChar val entries = for (i <- 0 until stringCount) yield { val stag = in.nextByte.toChar - assert(stag == STRING_TAG) + assert(stag == STRING_TAG, stag) in.nextChar.toInt } Some(ScalaSigBytes(pool.getBytes(entries.toList))) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala index 3c97122c9c..7d42dabc08 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ICodeReader.scala @@ -434,7 +434,7 @@ abstract class ICodeReader extends ClassfileParser { val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0 size += padding in.bp += padding - assert((pc + size % 4) != 0) + assert((pc + size % 4) != 0, pc) /* var byte1 = in.nextByte; size += 1; while (byte1 == 0) { byte1 = in.nextByte; size += 1; } val default = byte1 << 24 | in.nextByte << 16 | in.nextByte << 8 | in.nextByte; @@ -454,7 +454,7 @@ abstract class ICodeReader extends ClassfileParser { val padding = if ((pc + size) % 4 != 0) 4 - ((pc + size) % 4) else 0 size += padding in.bp += padding - assert((pc + size % 4) != 0) + assert((pc + size % 4) != 0, pc) val default = pc + in.nextInt; size += 4 val npairs = in.nextInt; size += 4 var tags: List[List[Int]] = Nil @@ -988,7 +988,7 @@ abstract class ICodeReader extends ClassfileParser { def enterParam(idx: Int, kind: TypeKind) = { val sym = method.symbol.newVariable(newTermName("par" + idx)).setInfo(kind.toType) val l = new Local(sym, kind, true) - assert(!locals.isDefinedAt(idx)) + assert(!locals.isDefinedAt(idx), locals(idx)) locals += (idx -> List((l, kind))) l } diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index b4e1956cf4..e01bbccf13 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -185,7 +185,7 @@ abstract class AddInterfaces extends InfoTransform { ) def implType(tp: Type): Type = tp match { case ClassInfoType(parents, decls, _) => - assert(phase == implClassPhase) + assert(phase == implClassPhase, tp) ClassInfoType( ObjectClass.tpe +: (parents.tail map mixinToImplClass filter (_.typeSymbol != ObjectClass)) :+ iface.tpe, implDecls(sym, decls), diff --git a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala index 14f3dc16fa..7f7f7e7b65 100644 --- a/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala +++ b/src/compiler/scala/tools/nsc/transform/ExplicitOuter.scala @@ -427,7 +427,7 @@ abstract class ExplicitOuter extends InfoTransform } val t = 
atPos(tree.pos) { - val context = MatrixContext(currentRun.currentUnit, transform, localTyper, currentOwner, tree.tpe) + val context = MatrixContext(currentUnit, transform, localTyper, currentOwner, tree.tpe) val t_untyped = handlePattern(nselector, ncases, checkExhaustive, context) /* if @switch annotation is present, verify the resulting tree is a Match */ @@ -506,7 +506,7 @@ abstract class ExplicitOuter extends InfoTransform val outerVal = atPos(tree.pos)(qual match { // it's a call between constructors of same class case _: This => - assert(outerParam != NoSymbol) + assert(outerParam != NoSymbol, tree) outerValue case _ => gen.mkAttributedQualifier(qual.tpe.prefix match { diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index a762e44bda..8af8bbc6ca 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -142,7 +142,7 @@ trait ContextErrors { case _ => found } - assert(!found.isErroneous && !req.isErroneous) + assert(!found.isErroneous && !req.isErroneous, (found, req)) issueNormalTypeError(tree, withAddendum(tree.pos)(typeErrorMsg(found, req, infer.isPossiblyMissingArgs(found, req))) ) if (settings.explaintypes.value) diff --git a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala index a1ade61dad..740acbd10f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Contexts.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Contexts.scala @@ -585,7 +585,7 @@ trait Contexts { self: Analyzer => debuglog("collect local implicits " + scope.toList)//DEBUG collectImplicits(scope.toList, NoPrefix) } else if (imports != nextOuter.imports) { - assert(imports.tail == nextOuter.imports) + assert(imports.tail == nextOuter.imports, (imports, nextOuter.imports)) collectImplicitImports(imports.head) } else if (owner.isPackageClass) { // the corresponding package object may contain implicit members. diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 73a43bf4a1..44579400ff 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -81,7 +81,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // we don't transform after typers // (that would require much more sophistication when generating trees, // and the only place that emits Matches after typers is for exception handling anyway) - assert(phase.id <= currentRun.typerPhase.id) + assert(phase.id <= currentRun.typerPhase.id, phase) val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen)) @@ -876,7 +876,7 @@ defined class Foo */ private val reusedBy = new collection.mutable.HashSet[Test] var reuses: Option[Test] = None def registerReuseBy(later: Test): Unit = { - assert(later.reuses.isEmpty) + assert(later.reuses.isEmpty, later.reuses) reusedBy += later later.reuses = Some(this) } @@ -1239,7 +1239,7 @@ defined class Foo */ case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? 
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner) + assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor) d.symbol.lazyAccessor.owner = currentOwner } if(d.symbol.moduleClass ne NoSymbol) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 1a54b26307..a99d09173e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1491,7 +1491,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R def checkSuper(mix: Name) = // term should have been eliminated by super accessors - assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY)) + assert(!(qual.symbol.isTrait && sym.isTerm && mix == tpnme.EMPTY), (qual.symbol, sym, mix)) transformCaseApply(tree, qual match { diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala index b109d57554..0ab09b4fec 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala @@ -453,7 +453,7 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT if (referencingClass.isSubClass(sym.owner.enclClass) || referencingClass.thisSym.isSubClass(sym.owner.enclClass) || referencingClass.enclosingPackageClass == sym.owner.enclosingPackageClass) { - assert(referencingClass.isClass) + assert(referencingClass.isClass, referencingClass) referencingClass } else if(referencingClass.owner.enclClass != NoSymbol) hostForAccessorOf(sym, referencingClass.owner.enclClass) diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index b0500776fe..ed263cbbef 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -142,11 +142,11 @@ abstract class TreeCheckers extends Analyzer { result } def runWithUnit[T](unit: CompilationUnit)(body: => Unit): Unit = { - val unit0 = currentRun.currentUnit + val unit0 = currentUnit currentRun.currentUnit = unit body currentRun.advanceUnit - assertFn(currentRun.currentUnit == unit, "currentUnit is " + currentRun.currentUnit + ", but unit is " + unit) + assertFn(currentUnit == unit, "currentUnit is " + currentUnit + ", but unit is " + unit) currentRun.currentUnit = unit0 } def check(unit: CompilationUnit) { diff --git a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala index 8c434a8838..4f4087a953 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TypeDiagnostics.scala @@ -39,8 +39,6 @@ trait TypeDiagnostics { import definitions._ import global.typer.{ infer, context } - private def currentUnit = currentRun.currentUnit - /** The common situation of making sure nothing is erroneous could be * nicer if Symbols, Types, and Trees all implemented some common interface * in which isErroneous and similar would be placed. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 889c04a59b..770b55d6ab 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -455,14 +455,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { @inline final def constrTyperIf(inConstr: Boolean): Typer = if (inConstr) { - assert(context.undetparams.isEmpty) + assert(context.undetparams.isEmpty, context.undetparams) newTyper(context.makeConstructorContext) } else this @inline final def withCondConstrTyper[T](inConstr: Boolean)(f: Typer => T): T = if (inConstr) { - assert(context.undetparams.isEmpty) + assert(context.undetparams.isEmpty, context.undetparams) val c = context.makeConstructorContext typerWithLocalContext(c)(f) } else { @@ -867,7 +867,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } def insertApply(): Tree = { - assert(!inHKMode(mode)) //@M + assert(!inHKMode(mode), modeString(mode)) //@M val qual = adaptToName(tree, nme.apply) match { case id @ Ident(_) => val pre = if (id.symbol.owner.isPackageClass) id.symbol.owner.thisType @@ -948,7 +948,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { applyPossible) insertApply() else if (!context.undetparams.isEmpty && !inPolyMode(mode)) { // (9) - assert(!inHKMode(mode)) //@M + assert(!inHKMode(mode), modeString(mode)) //@M if (inExprModeButNot(mode, FUNmode) && pt.typeSymbol == UnitClass) instantiateExpectingUnit(tree, mode) else @@ -1239,7 +1239,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { }) val outercontext = context.outer - assert(clazz != NoSymbol) + assert(clazz != NoSymbol, templ) val cscope = outercontext.makeNewScope(constr, outercontext.owner) val cbody2 = newTyper(cscope) // called both during completion AND typing. .typePrimaryConstrBody(clazz, @@ -1401,7 +1401,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // attributes(cdef) val clazz = cdef.symbol val typedMods = removeAnnotations(cdef.mods) - assert(clazz != NoSymbol) + assert(clazz != NoSymbol, cdef) reenterTypeParams(cdef.tparams) val tparams1 = cdef.tparams mapConserve (typedTypeDef) val impl1 = typerReportAnyContextErrors(context.make(cdef.impl, clazz, newScope)) { @@ -1611,7 +1611,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { (call, List()) } val (superConstr, superArgs) = decompose(rhs) - assert(superConstr.symbol ne null)//debug + assert(superConstr.symbol ne null, superConstr)//debug val pending = ListBuffer[AbsTypeError]() // an object cannot be allowed to pass a reference to itself to a superconstructor @@ -2521,7 +2521,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { inferExprInstance(fun, tparams) doTypedApply(tree, fun, args, mode, pt) } else { - assert(!inPatternMode(mode)) // this case cannot arise for patterns + assert(!inPatternMode(mode), modeString(mode)) // this case cannot arise for patterns val lenientTargs = protoTypeArgs(tparams, formals, mt.resultApprox, pt) val strictTargs = map2(lenientTargs, tparams)((targ, tparam) => if (targ == WildcardType) tparam.tpe else targ) //@M TODO: should probably be .tpeHK @@ -4414,7 +4414,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // whatever type to tree; we just have to survive until a real error message is issued. 
tree setType AnyClass.tpe case Import(expr, selectors) => - assert(forInteractive) // should not happen in normal circumstances. + assert(forInteractive, "!forInteractive") // should not happen in normal circumstances. tree setType tree.symbol.tpe case _ => abort("unexpected tree: " + tree.getClass + "\n" + tree)//debug diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index a7cd89621c..19b8632ed7 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -25,7 +25,7 @@ trait Unapplies extends ast.TreeDSL /** returns type list for return type of the extraction */ def unapplyTypeList(ufn: Symbol, ufntpe: Type) = { - assert(ufn.isMethod) + assert(ufn.isMethod, ufn) //Console.println("utl "+ufntpe+" "+ufntpe.typeSymbol) ufn.name match { case nme.unapply => unapplyTypeListFromReturnType(ufntpe) diff --git a/src/compiler/scala/tools/nsc/util/SourceFile.scala b/src/compiler/scala/tools/nsc/util/SourceFile.scala index 4405b3457b..e1ae96da8c 100644 --- a/src/compiler/scala/tools/nsc/util/SourceFile.scala +++ b/src/compiler/scala/tools/nsc/util/SourceFile.scala @@ -34,7 +34,7 @@ abstract class SourceFile { * For regular source files, simply return the argument. */ def positionInUltimateSource(position: Position) = position - override def toString(): String = file.name /* + ":" + content.length */ + override def toString() = file.name def dbg(offset: Int) = (new OffsetPosition(this, offset)).dbgString def path = file.path @@ -61,7 +61,7 @@ object NoSourceFile extends SourceFile { def length = -1 def offsetToLine(offset: Int) = -1 def lineToOffset(index : Int) = -1 - override def toString = "NoSourceFile" + override def toString = "" } object NoFile extends VirtualFile("", "") -- cgit v1.2.3 From 2df3934168f62d8662d5f29c1b0db6119353406d Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Thu, 26 Jan 2012 18:07:49 -0200 Subject: Performance improvements for CommentFactory. A few small improvements to the parsing steps of CommentFactory (both main parsing and wiki format parsing), resulting in a 30% reduction of time spent in def parse, which gives me 5% reduction in docs.lib. Turn docBody into a StringBuilder, to improve concatenation speed. Make WikiParser and CharReader work with String instead of Array[String], to reduce copying of data. Get rid of the implicit conversion from String to Array[String]. Also, adjust a couple of regex to avoid expensive backtracking. The new regex has a different semantics, which is arguably more correct than the former one. There's absolutely no change in the generated docs for Scala anyway. 
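A minimal sketch, not part of this patch: the regex change described above swaps a leading greedy (.*) for a lazy (.*?) in CodeBlockStart/CodeBlockEnd. The stand-in patterns below only model the {{{ marker case (the real patterns are more involved), but they show the semantic difference the commit message refers to — the lazy prefix binds the first marker on a line instead of the last.

object RegexBacktrackDemo extends App {
  // Simplified stand-ins for the patched CodeBlockStart pattern; only the
  // greedy-to-lazy change on the leading group is modelled here.
  val Greedy = """(.*)(\{\{\{)(.*)""".r   // old style: leading group is greedy
  val Lazy   = """(.*?)(\{\{\{)(.*)""".r  // new style: leading group is lazy (non-greedy)

  val line = "some text {{{ code {{{ more"

  // The greedy group swallows the whole line first and the engine backtracks,
  // so group 2 ends up bound to the *last* marker on the line ...
  val Greedy(greedyBefore, _, _) = line
  println("greedy before = [" + greedyBefore + "]")   // prints: [some text {{{ code ]

  // ... whereas the lazy group stops at the *first* marker -- the "different
  // semantics" mentioned above; the reduced backtracking reported in the commit
  // shows up on the full patterns, which combine this prefix with an alternation.
  val Lazy(lazyBefore, _, _) = line
  println("lazy before   = [" + lazyBefore + "]")     // prints: [some text ]
}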
--- .../nsc/doc/model/comment/CommentFactory.scala | 100 +++++++++++---------- 1 file changed, 53 insertions(+), 47 deletions(-) diff --git a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala index ea36eb03c7..efa524503c 100644 --- a/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/model/comment/CommentFactory.scala @@ -196,11 +196,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => /** The start of a scaladoc code block */ protected val CodeBlockStart = - new Regex("""(.*)((?:\{\{\{)|(?:\u000E]*)?>\u000E))(.*)""") + new Regex("""(.*?)((?:\{\{\{)|(?:\u000E]*)?>\u000E))(.*)""") /** The end of a scaladoc code block */ protected val CodeBlockEnd = - new Regex("""(.*)((?:\}\}\})|(?:\u000E\u000E))(.*)""") + new Regex("""(.*?)((?:\}\}\})|(?:\u000E\u000E))(.*)""") /** A key used for a tag map. The key is built from the name of the tag and * from the linked symbol if the tag has one. @@ -250,7 +250,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => * @param remaining The lines that must still recursively be parsed. * @param inCodeBlock Whether the next line is part of a code block (in which no tags must be read). */ def parse0 ( - docBody: String, + docBody: StringBuilder, tags: Map[TagKey, List[String]], lastTagKey: Option[TagKey], remaining: List[String], @@ -258,9 +258,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => ): Comment = remaining match { case CodeBlockStart(before, marker, after) :: ls if (!inCodeBlock) => - if (before.trim != "") - parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, false) - else if (after.trim != "") + if (!before.trim.isEmpty && !after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, false) + else if (!before.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, false) + else if (!after.trim.isEmpty) parse0(docBody, tags, lastTagKey, marker :: after :: ls, true) else lastTagKey match { case Some(key) => @@ -271,24 +273,26 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => } parse0(docBody, tags + (key -> value), lastTagKey, ls, true) case None => - parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, true) + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, true) } case CodeBlockEnd(before, marker, after) :: ls => - if (before.trim != "") - parse0(docBody, tags, lastTagKey, before :: (marker + after) :: ls, true) - else if (after.trim != "") + if (!before.trim.isEmpty && !after.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: after :: ls, true) + if (!before.trim.isEmpty) + parse0(docBody, tags, lastTagKey, before :: marker :: ls, true) + else if (!after.trim.isEmpty) parse0(docBody, tags, lastTagKey, marker :: after :: ls, false) else lastTagKey match { case Some(key) => val value = ((tags get key): @unchecked) match { - case Some(b :: bs) => (b + endOfLine + "}}}") :: bs + case Some(b :: bs) => (b + endOfLine + marker) :: bs case None => oops("lastTagKey set when no tag exists for key") } parse0(docBody, tags + (key -> value), lastTagKey, ls, false) case None => - parse0(docBody + endOfLine + marker, tags, lastTagKey, ls, false) + parse0(docBody append endOfLine append marker, tags, lastTagKey, ls, false) } case SymbolTag(name, sym, body) :: ls if (!inCodeBlock) => @@ -311,8 +315,9 @@ trait 
CommentFactory { thisFactory: ModelFactory with CommentFactory => parse0(docBody, tags + (key -> value), lastTagKey, ls, inCodeBlock) case line :: ls => - val newBody = if (docBody == "") line else docBody + endOfLine + line - parse0(newBody, tags, lastTagKey, ls, inCodeBlock) + if (docBody.length > 0) docBody append endOfLine + docBody append line + parse0(docBody, tags, lastTagKey, ls, inCodeBlock) case Nil => @@ -350,7 +355,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => } val com = createComment ( - body0 = Some(parseWiki(docBody, pos)), + body0 = Some(parseWiki(docBody.toString, pos)), authors0 = allTags(SimpleTagKey("author")), see0 = allTags(SimpleTagKey("see")), result0 = oneTag(SimpleTagKey("return")), @@ -374,7 +379,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => } - parse0("", Map.empty, None, clean(comment), false) + parse0(new StringBuilder(comment.size), Map.empty, None, clean(comment), false) } @@ -385,7 +390,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => * - Removed all end-of-line whitespace. * - Only `endOfLine` is used to mark line endings. */ def parseWiki(string: String, pos: Position): Body = { - new WikiParser(string.toArray, pos).document() + new WikiParser(string, pos).document() } /** TODO @@ -393,7 +398,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => * @author Ingo Maier * @author Manohar Jonnalagedda * @author Gilles Dubochet */ - protected final class WikiParser(val buffer: Array[Char], pos: Position) extends CharReader(buffer) { wiki => + protected final class WikiParser(val buffer: String, pos: Position) extends CharReader(buffer) { wiki => var summaryParsed = false @@ -411,7 +416,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => def block(): Block = { if (checkSkipInitWhitespace("{{{")) code() - else if (checkSkipInitWhitespace("=")) + else if (checkSkipInitWhitespace('=')) title() else if (checkSkipInitWhitespace("----")) hrule() @@ -493,7 +498,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => def title(): Block = { jumpWhitespace() val inLevel = repeatJump("=") - val text = inline(check(Array.fill(inLevel)('='))) + val text = inline(check("=" * inLevel)) val outLevel = repeatJump("=", inLevel) if (inLevel != outLevel) reportError(pos, "unbalanced or unclosed heading") @@ -734,11 +739,11 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => val pc = char nextChar() // read EOL val ok = { - checkSkipInitWhitespace(Array(endOfLine)) || - checkSkipInitWhitespace(Array('=')) || - checkSkipInitWhitespace(Array('{', '{', '{')) || + checkSkipInitWhitespace(endOfLine) || + checkSkipInitWhitespace('=') || + checkSkipInitWhitespace("{{{") || checkList || - checkSkipInitWhitespace(Array('\u003D')) + checkSkipInitWhitespace('\u003D') } offset = poff char = pc @@ -751,7 +756,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => } } - protected sealed class CharReader(buffer: Array[Char]) { reader => + protected sealed class CharReader(buffer: String) { reader => var char: Char = _ var offset: Int = 0 @@ -760,36 +765,37 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => if (offset >= buffer.length) char = endOfText else { - char = buffer(offset) + char = buffer charAt offset offset += 1 } } - implicit def strintToChars(s: String): Array[Char] = s.toArray - - def store(body: => Unit): String = { - val pre = offset - body - val post = 
offset - buffer.toArray.slice(pre, post).toString + final def check(chars: String): Boolean = { + val poff = offset + val pc = char + val ok = jump(chars) + offset = poff + char = pc + ok } - final def check(chars: Array[Char]): Boolean = { + def checkSkipInitWhitespace(c: Char): Boolean = { val poff = offset val pc = char - val ok = jump(chars) + jumpWhitespace() + val ok = jump(c) offset = poff char = pc ok } - def checkSkipInitWhitespace(chars: Array[Char]): Boolean = { + def checkSkipInitWhitespace(chars: String): Boolean = { val poff = offset val pc = char jumpWhitespace() val (ok0, chars0) = - if (chars.head == ' ') - (offset > poff, chars.tail) + if (chars.charAt(0) == ' ') + (offset > poff, chars substring 1) else (true, chars) val ok = ok0 && jump(chars0) @@ -825,16 +831,16 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => /** jumps all the characters in chars, consuming them in the process. * @return true only if the correct characters have been jumped */ - final def jump(chars: Array[Char]): Boolean = { + final def jump(chars: String): Boolean = { var index = 0 - while (index < chars.length && char == chars(index) && char != endOfText) { + while (index < chars.length && char == chars.charAt(index) && char != endOfText) { nextChar() index += 1 } index == chars.length } - final def checkedJump(chars: Array[Char]): Boolean = { + final def checkedJump(chars: String): Boolean = { val poff = offset val pc = char val ok = jump(chars) @@ -845,7 +851,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => ok } - final def repeatJump(chars: Array[Char], max: Int): Int = { + final def repeatJump(chars: String, max: Int): Int = { var count = 0 var more = true while (more && count < max) { @@ -857,7 +863,7 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => count } - final def repeatJump(chars: Array[Char]): Int = { + final def repeatJump(chars: String): Int = { var count = 0 var more = true while (more) { @@ -878,10 +884,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => count } - final def jumpUntil(chars: Array[Char]): Int = { + final def jumpUntil(chars: String): Int = { assert(chars.length > 0) var count = 0 - val c = chars(0) + val c = chars.charAt(0) while (!check(chars) && char != endOfText) { nextChar() while (char != c && char != endOfText) { @@ -922,10 +928,10 @@ trait CommentFactory { thisFactory: ModelFactory with CommentFactory => count } - final def readUntil(chars: Array[Char]): Int = { + final def readUntil(chars: String): Int = { assert(chars.length > 0) var count = 0 - val c = chars(0) + val c = chars.charAt(0) while (!check(chars) && char != endOfText) { readBuilder += char nextChar() -- cgit v1.2.3 From 9b9fb2cad46041c6cf101ec436b643e3e922bd35 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 26 Jan 2012 13:19:41 -0800 Subject: Disabled "not found" suggestions. The benchmarks charts are confusing me and I want to rule it out as a problem by not having it exist for a while. 
--- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 4 ++++ test/files/neg/nopredefs.check | 2 +- test/files/neg/suggest-similar.check | 6 +++--- test/files/neg/t2870.check | 2 +- 4 files changed, 9 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 8af8bbc6ca..6ee09d064f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -171,6 +171,8 @@ trait ContextErrors { NormalTypeError(tree, "reference to " + name + " is ambiguous;\n" + msg) def SymbolNotFoundError(tree: Tree, name: Name, owner: Symbol, startingIdentCx: Context) = { + /*** Disabled pending investigation of performance impact. + // This laborious determination arrived at to keep the tests working. val calcSimilar = ( name.length > 2 && ( @@ -196,6 +198,8 @@ trait ContextErrors { similarString("" + name, allowedStrings) } } + */ + val similar = "" NormalTypeError(tree, "not found: "+decodeWithKind(name, owner) + similar) } diff --git a/test/files/neg/nopredefs.check b/test/files/neg/nopredefs.check index e6c1af78a0..0a0ab34482 100644 --- a/test/files/neg/nopredefs.check +++ b/test/files/neg/nopredefs.check @@ -1,4 +1,4 @@ -nopredefs.scala:5: error: not found: value Set (similar: Seq) +nopredefs.scala:5: error: not found: value Set val y = Set(3) ^ one error found diff --git a/test/files/neg/suggest-similar.check b/test/files/neg/suggest-similar.check index 0a858aaf2e..057aa8b250 100644 --- a/test/files/neg/suggest-similar.check +++ b/test/files/neg/suggest-similar.check @@ -1,10 +1,10 @@ -suggest-similar.scala:8: error: not found: value flippitx (similar: flippity) +suggest-similar.scala:8: error: not found: value flippitx flippitx = 123 ^ -suggest-similar.scala:9: error: not found: value identiyt (similar: identity) +suggest-similar.scala:9: error: not found: value identiyt Nil map identiyt ^ -suggest-similar.scala:10: error: not found: type Bingus (similar: Dingus) +suggest-similar.scala:10: error: not found: type Bingus new Bingus ^ three errors found diff --git a/test/files/neg/t2870.check b/test/files/neg/t2870.check index ab962d48c8..99522eca65 100644 --- a/test/files/neg/t2870.check +++ b/test/files/neg/t2870.check @@ -1,4 +1,4 @@ -t2870.scala:1: error: not found: type Jar (similar: Jars) +t2870.scala:1: error: not found: type Jar class Jars(jar: Jar) ^ t2870.scala:4: error: encountered unrecoverable cycle resolving import. -- cgit v1.2.3 From 6a5901461c1aaac7dd8786cb374b079520895527 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Fri, 27 Jan 2012 13:22:49 +0100 Subject: -Yshow-symkinds: prints kinds next to symbol names This very situational option proved to be very useful when debugging https://issues.scala-lang.org/browse/SI-5415 With the help of -Yshow-symkinds, it became possible to distinguish a free var from a module symbol, which gave precise indication of the root of the bug. This changeset also upgrades stringification of symbols and types, so I'd like to get a review by @paulp and @odersky. 
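A stand-alone sketch, not part of this patch, of how the new flag combines with -uniqid in Symbol.nameString; it mirrors the four cases added in the diff below. The symbol name "Foo", the id 1234 and the "CLS" abbreviation are made-up example inputs.

object SymKindDemo extends App {
  // Mirrors the four nameString cases from the patch; the real method reads
  // settings.uniqid and settings.Yshowsymkinds instead of taking parameters.
  def nameString(decodedName: String, id: Int, kindAbbrev: String,
                 uniqid: Boolean, showSymKinds: Boolean): String =
    if (!uniqid && !showSymKinds) decodedName
    else if (uniqid && !showSymKinds) decodedName + "#" + id
    else if (!uniqid && showSymKinds) decodedName + "#" + kindAbbrev
    else decodedName + "#" + id + "#" + kindAbbrev

  // Hypothetical class symbol named Foo with id 1234 ("CLS" is the class abbreviation).
  println(nameString("Foo", 1234, "CLS", uniqid = false, showSymKinds = false)) // Foo
  println(nameString("Foo", 1234, "CLS", uniqid = false, showSymKinds = true))  // Foo#CLS
  println(nameString("Foo", 1234, "CLS", uniqid = true,  showSymKinds = true))  // Foo#1234#CLS
}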
--- src/compiler/scala/reflect/internal/Symbols.scala | 73 ++++++++++++++-------- .../scala/reflect/internal/TreePrinters.scala | 18 ++++-- src/compiler/scala/reflect/internal/Types.scala | 2 +- .../internal/settings/MutableSettings.scala | 1 + src/compiler/scala/reflect/runtime/Settings.scala | 1 + src/compiler/scala/reflect/runtime/ToolBoxes.scala | 20 +++--- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + 7 files changed, 77 insertions(+), 39 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 9f8476a6fe..47a4f190cb 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1898,36 +1898,42 @@ trait Symbols extends api.Symbols { self: SymbolTable => else if (isTerm && (!isParameter || isParamAccessor)) "val" else "" + private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) + private def symbolKind: SymbolKind = { + val kind = + if (isPackage) ("package", "package", "PK") + else if (isPackageClass) ("package class", "package", "PC") + else if (isPackageObject) ("package object", "package", "PO") + else if (isPackageObjectClass) ("package object class", "package", "POC") + else if (isRefinementClass) ("refinement class", "", "RC") + else if (isModule) ("module", "object", "MO") + else if (isModuleClass) ("module class", "object", "MC") + else if (isGetter) ("getter", "method", "GET") + else if (isSetter) ("setter", "method", "SET") + else if (isVariable) ("field", "variable", "F") + else if (isTrait) ("trait", "trait", "TR") + else if (isClass) ("class", "class", "CLS") + else if (isType) ("type", "type", "TPE") + else if (isInstanceOf[FreeVar]) ("free variable", "free variable", "FV") + else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ") + else if (isClassConstructor) ("constructor", "constructor", "CTR") + else if (isSourceMethod) ("method", "method", "MET") + else if (isTerm) ("value", "value", "VAL") + else ("", "", "??") + SymbolKind(kind._1, kind._2, kind._3) + } + /** Accurate string representation of symbols' kind, suitable for developers. */ final def accurateKindString: String = - if (isPackage) "package" - else if (isPackageClass) "package class" - else if (isPackageObject) "package object" - else if (isPackageObjectClass) "package object class" - else if (isRefinementClass) "refinement class" - else if (isModule) "module" - else if (isModuleClass) "module class" - else if (isGetter) "getter" - else if (isSetter) "setter" - else if (isVariable) "field" - else sanitizedKindString + symbolKind.accurate /** String representation of symbol's kind, suitable for the masses. */ private def sanitizedKindString: String = - if (isPackage || isPackageClass) "package" - else if (isModule || isModuleClass) "object" - else if (isAnonymousClass) "anonymous class" - else if (isRefinementClass) "" - else if (isTrait) "trait" - else if (isClass) "class" - else if (isType) "type" - else if (isInstanceOf[FreeVar]) "free variable" - else if (isTerm && isLazy) "lazy value" - else if (isVariable) "variable" - else if (isClassConstructor) "constructor" - else if (isSourceMethod) "method" - else if (isTerm) "value" - else "" + symbolKind.sanitized + + /** String representation of symbol's kind, suitable for the masses. 
*/ + protected[scala] def abbreviatedKindString: String = + symbolKind.abbreviation final def kindString: String = if (settings.debug.value) accurateKindString @@ -1950,12 +1956,25 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If !settings.debug translates expansions of operators back to operator symbol. * E.g. $eq => =. * If settings.uniqid, adds id. + * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = ( - if (settings.uniqid.value) decodedName + "#" + id - else "" + decodedName + if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName + else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id + else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString + else decodedName + "#" + id + "#" + abbreviatedKindString ) + def fullNameString: String = { + def recur(sym: Symbol): String = { + if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) nameString + else if (sym.owner.isEffectiveRoot) nameString + else recur(sym.effectiveOwner.enclClass) + "." + nameString + } + + recur(this) + } + /** If settings.uniqid is set, the symbol's id, else "" */ final def idString = if (settings.uniqid.value) "#"+id else "" diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala index 3a0717d344..63e4c9f1fa 100644 --- a/src/compiler/scala/reflect/internal/TreePrinters.scala +++ b/src/compiler/scala/reflect/internal/TreePrinters.scala @@ -27,10 +27,20 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable => /** Turns a path into a String, introducing backquotes * as necessary. */ - def backquotedPath(t: Tree): String = t match { - case Select(qual, name) => "%s.%s".format(backquotedPath(qual), quotedName(name)) - case Ident(name) => quotedName(name) - case _ => t.toString + def backquotedPath(t: Tree): String = { + def suffix(t: Tree) = { + var suffix = "" + if (t.hasSymbol && settings.uniqid.value) suffix += ("#" + t.symbol.id) + if (t.hasSymbol && settings.Yshowsymkinds.value) suffix += ("#" + t.symbol.abbreviatedKindString) + suffix + } + + t match { + case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) + case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) + case Ident(name) => quotedName(name) + suffix(t) + case _ => t.toString + } } class TreePrinter(out: PrintWriter) extends super.TreePrinter { diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 35d26493f8..fab10f7896 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1205,7 +1205,7 @@ trait Types extends api.Types { self: SymbolTable => if (settings.debug.value) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" - else if (sym.isModuleClass) sym.fullName + "." + else if (sym.isModuleClass) sym.fullNameString + "." else sym.nameString + ".this." 
override def safeToString: String = if (sym.isRoot) "" diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala index 6980d28bfb..0092f73fe3 100644 --- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala @@ -38,6 +38,7 @@ abstract class MutableSettings extends AbsSettings { def explaintypes: BooleanSetting def verbose: BooleanSetting def uniqid: BooleanSetting + def Yshowsymkinds: BooleanSetting def Xprintpos: BooleanSetting def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala index 2a6cdea519..b4f0123114 100644 --- a/src/compiler/scala/reflect/runtime/Settings.scala +++ b/src/compiler/scala/reflect/runtime/Settings.scala @@ -28,6 +28,7 @@ class Settings extends internal.settings.MutableSettings { val explaintypes = new BooleanSetting(false) val verbose = new BooleanSetting(false) val uniqid = new BooleanSetting(false) + val Yshowsymkinds = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val Yrecursion = new IntSetting(0) diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 9ab12c6a86..46d890c5d1 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -123,15 +123,21 @@ trait ToolBoxes extends { self: Universe => applyMeth.invoke(result) } } - - def showAttributed(tree: Tree): String = { - val saved = settings.printtypes.value + + def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = { + val saved1 = settings.printtypes.value + val saved2 = settings.uniqid.value + val saved3 = settings.Yshowsymkinds.value try { - settings.printtypes.value = true - //settings.uniqid.value = true + settings.printtypes.value = printTypes + settings.uniqid.value = printIds + settings.uniqid.value = printKinds tree.toString - } finally - compiler.settings.printtypes.value = saved + } finally { + settings.printtypes.value = saved1 + settings.uniqid.value = saved2 + settings.Yshowsymkinds.value = saved3 + } } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 6806ca03ba..107ffc35c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -140,6 +140,7 @@ trait ScalaSettings extends AbsScalaSettings val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.") val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs.") val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.") + val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.") val skip = PhasesSetting ("-Yskip", "Skip") val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "") val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") -- cgit v1.2.3 From 
3539dde9cdc15d6a2ae107b1322dcc7366613af4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 27 Jan 2012 21:37:03 -0800 Subject: Revert "-Yshow-symkinds: prints kinds next to symbol names" This reverts commit 6a5901461c1aaac7dd8786cb374b079520895527. --- src/compiler/scala/reflect/internal/Symbols.scala | 73 ++++++++-------------- .../scala/reflect/internal/TreePrinters.scala | 18 ++---- src/compiler/scala/reflect/internal/Types.scala | 2 +- .../internal/settings/MutableSettings.scala | 1 - src/compiler/scala/reflect/runtime/Settings.scala | 1 - src/compiler/scala/reflect/runtime/ToolBoxes.scala | 20 +++--- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 - 7 files changed, 39 insertions(+), 77 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 47a4f190cb..9f8476a6fe 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1898,42 +1898,36 @@ trait Symbols extends api.Symbols { self: SymbolTable => else if (isTerm && (!isParameter || isParamAccessor)) "val" else "" - private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) - private def symbolKind: SymbolKind = { - val kind = - if (isPackage) ("package", "package", "PK") - else if (isPackageClass) ("package class", "package", "PC") - else if (isPackageObject) ("package object", "package", "PO") - else if (isPackageObjectClass) ("package object class", "package", "POC") - else if (isRefinementClass) ("refinement class", "", "RC") - else if (isModule) ("module", "object", "MO") - else if (isModuleClass) ("module class", "object", "MC") - else if (isGetter) ("getter", "method", "GET") - else if (isSetter) ("setter", "method", "SET") - else if (isVariable) ("field", "variable", "F") - else if (isTrait) ("trait", "trait", "TR") - else if (isClass) ("class", "class", "CLS") - else if (isType) ("type", "type", "TPE") - else if (isInstanceOf[FreeVar]) ("free variable", "free variable", "FV") - else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ") - else if (isClassConstructor) ("constructor", "constructor", "CTR") - else if (isSourceMethod) ("method", "method", "MET") - else if (isTerm) ("value", "value", "VAL") - else ("", "", "??") - SymbolKind(kind._1, kind._2, kind._3) - } - /** Accurate string representation of symbols' kind, suitable for developers. */ final def accurateKindString: String = - symbolKind.accurate + if (isPackage) "package" + else if (isPackageClass) "package class" + else if (isPackageObject) "package object" + else if (isPackageObjectClass) "package object class" + else if (isRefinementClass) "refinement class" + else if (isModule) "module" + else if (isModuleClass) "module class" + else if (isGetter) "getter" + else if (isSetter) "setter" + else if (isVariable) "field" + else sanitizedKindString /** String representation of symbol's kind, suitable for the masses. */ private def sanitizedKindString: String = - symbolKind.sanitized - - /** String representation of symbol's kind, suitable for the masses. 
*/ - protected[scala] def abbreviatedKindString: String = - symbolKind.abbreviation + if (isPackage || isPackageClass) "package" + else if (isModule || isModuleClass) "object" + else if (isAnonymousClass) "anonymous class" + else if (isRefinementClass) "" + else if (isTrait) "trait" + else if (isClass) "class" + else if (isType) "type" + else if (isInstanceOf[FreeVar]) "free variable" + else if (isTerm && isLazy) "lazy value" + else if (isVariable) "variable" + else if (isClassConstructor) "constructor" + else if (isSourceMethod) "method" + else if (isTerm) "value" + else "" final def kindString: String = if (settings.debug.value) accurateKindString @@ -1956,25 +1950,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If !settings.debug translates expansions of operators back to operator symbol. * E.g. $eq => =. * If settings.uniqid, adds id. - * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = ( - if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName - else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id - else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString - else decodedName + "#" + id + "#" + abbreviatedKindString + if (settings.uniqid.value) decodedName + "#" + id + else "" + decodedName ) - def fullNameString: String = { - def recur(sym: Symbol): String = { - if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) nameString - else if (sym.owner.isEffectiveRoot) nameString - else recur(sym.effectiveOwner.enclClass) + "." + nameString - } - - recur(this) - } - /** If settings.uniqid is set, the symbol's id, else "" */ final def idString = if (settings.uniqid.value) "#"+id else "" diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala index 63e4c9f1fa..3a0717d344 100644 --- a/src/compiler/scala/reflect/internal/TreePrinters.scala +++ b/src/compiler/scala/reflect/internal/TreePrinters.scala @@ -27,20 +27,10 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable => /** Turns a path into a String, introducing backquotes * as necessary. */ - def backquotedPath(t: Tree): String = { - def suffix(t: Tree) = { - var suffix = "" - if (t.hasSymbol && settings.uniqid.value) suffix += ("#" + t.symbol.id) - if (t.hasSymbol && settings.Yshowsymkinds.value) suffix += ("#" + t.symbol.abbreviatedKindString) - suffix - } - - t match { - case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) - case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) - case Ident(name) => quotedName(name) + suffix(t) - case _ => t.toString - } + def backquotedPath(t: Tree): String = t match { + case Select(qual, name) => "%s.%s".format(backquotedPath(qual), quotedName(name)) + case Ident(name) => quotedName(name) + case _ => t.toString } class TreePrinter(out: PrintWriter) extends super.TreePrinter { diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index fab10f7896..35d26493f8 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1205,7 +1205,7 @@ trait Types extends api.Types { self: SymbolTable => if (settings.debug.value) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." 
else if (sym.isOmittablePrefix) "" - else if (sym.isModuleClass) sym.fullNameString + "." + else if (sym.isModuleClass) sym.fullName + "." else sym.nameString + ".this." override def safeToString: String = if (sym.isRoot) "" diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala index 0092f73fe3..6980d28bfb 100644 --- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala @@ -38,7 +38,6 @@ abstract class MutableSettings extends AbsSettings { def explaintypes: BooleanSetting def verbose: BooleanSetting def uniqid: BooleanSetting - def Yshowsymkinds: BooleanSetting def Xprintpos: BooleanSetting def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala index b4f0123114..2a6cdea519 100644 --- a/src/compiler/scala/reflect/runtime/Settings.scala +++ b/src/compiler/scala/reflect/runtime/Settings.scala @@ -28,7 +28,6 @@ class Settings extends internal.settings.MutableSettings { val explaintypes = new BooleanSetting(false) val verbose = new BooleanSetting(false) val uniqid = new BooleanSetting(false) - val Yshowsymkinds = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val Yrecursion = new IntSetting(0) diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 46d890c5d1..9ab12c6a86 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -123,21 +123,15 @@ trait ToolBoxes extends { self: Universe => applyMeth.invoke(result) } } - - def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = { - val saved1 = settings.printtypes.value - val saved2 = settings.uniqid.value - val saved3 = settings.Yshowsymkinds.value + + def showAttributed(tree: Tree): String = { + val saved = settings.printtypes.value try { - settings.printtypes.value = printTypes - settings.uniqid.value = printIds - settings.uniqid.value = printKinds + settings.printtypes.value = true + //settings.uniqid.value = true tree.toString - } finally { - settings.printtypes.value = saved1 - settings.uniqid.value = saved2 - settings.Yshowsymkinds.value = saved3 - } + } finally + compiler.settings.printtypes.value = saved } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 107ffc35c6..6806ca03ba 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -140,7 +140,6 @@ trait ScalaSettings extends AbsScalaSettings val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.") val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs.") val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.") - val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.") val skip = PhasesSetting ("-Yskip", "Skip") val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "") val Ydumpclasses = StringSetting ("-Ydump-classes", 
"dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") -- cgit v1.2.3 From 1cc493db100d2787b740b9cae59eff9a2a821cb8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 27 Jan 2012 20:09:23 -0800 Subject: Make 'illegal' characters less annoying. At least tell us what they are. --- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index a2a577a7ab..4478fb6128 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -468,7 +468,7 @@ trait Scanners extends ScannersCommon { nextChar() getOperatorRest() } else { - syntaxError("illegal character") + syntaxError("illegal character '" + ("" + '\\' + 'u' + "%04x".format(ch: Int)) + "'") nextChar() } } -- cgit v1.2.3 From 5e2bf28ff209cc587d8bcf0d7fb4ae780936146a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 27 Jan 2012 20:21:34 -0800 Subject: Changed partest to use UTF-8. Finally had a concrete motivation to change partest's inexplicable ISO-8859-1 encoding to UTF-8. My test case would cause it to barf "illegal character". Is this going to break on windows or something? If so, it's time to fix the broken place which can't handle UTF-8. --- src/partest/scala/tools/partest/nest/CompileManager.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala index aea6bcc03a..6604bc551d 100644 --- a/src/partest/scala/tools/partest/nest/CompileManager.scala +++ b/src/partest/scala/tools/partest/nest/CompileManager.scala @@ -26,7 +26,7 @@ class TestSettings(cp: String, error: String => Unit) extends Settings(error) { deprecation.value = true nowarnings.value = false - encoding.value = "ISO-8859-1" + encoding.value = "UTF-8" classpath.value = cp } -- cgit v1.2.3 From 0d0cdea28ec142a3e6da7a29b8130138a41ae782 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 27 Jan 2012 20:09:38 -0800 Subject: Test case closes SI-3854. 
--- test/files/neg/t3854.check | 5 +++++ test/files/neg/t3854.scala | 15 +++++++++++++++ 2 files changed, 20 insertions(+) create mode 100644 test/files/neg/t3854.check create mode 100644 test/files/neg/t3854.scala diff --git a/test/files/neg/t3854.check b/test/files/neg/t3854.check new file mode 100644 index 0000000000..c478481a6f --- /dev/null +++ b/test/files/neg/t3854.check @@ -0,0 +1,5 @@ +t3854.scala:1: error: class Bar needs to be abstract, since method foo in trait Foo of type [G[_]](implicit n: N[G,F])X[F] is not defined +(Note that N[G,F] does not match M[G]) +class Bar[F[_]] extends Foo[F] { + ^ +one error found diff --git a/test/files/neg/t3854.scala b/test/files/neg/t3854.scala new file mode 100644 index 0000000000..e8db76c0a5 --- /dev/null +++ b/test/files/neg/t3854.scala @@ -0,0 +1,15 @@ +class Bar[F[_]] extends Foo[F] { + def foo[G[_[_], _]](implicit M: M[G]): X[({type λ[α] = G[F, α] })#λ] = null +} +// vim: set ts=4 sw=4 et: + +trait M[F[_[_], _]] +trait N[F[_], G[_]] + +trait X[F[_]] { + def apply[A]: F[A] +} + +trait Foo[F[_]] { + def foo[G[_]](implicit n: N[G, F]): X[F] +} -- cgit v1.2.3 From 4224d2a7b0a6beb47760fc323e9c946813f6bdb0 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sat, 28 Jan 2012 07:49:20 +0100 Subject: -Yshow-symkinds: prints kinds next to symbol names This very situational option proved to be very useful when debugging https://issues.scala-lang.org/browse/SI-5415 With the help of -Yshow-symkinds, it became possible to distinguish a free var from a module symbol, which gave precise indication of the root of the bug. --- src/compiler/scala/reflect/internal/Symbols.scala | 74 ++++++++++++++-------- .../scala/reflect/internal/TreePrinters.scala | 18 ++++-- src/compiler/scala/reflect/internal/Types.scala | 2 +- .../internal/settings/MutableSettings.scala | 1 + src/compiler/scala/reflect/runtime/Settings.scala | 1 + src/compiler/scala/reflect/runtime/ToolBoxes.scala | 20 ++++-- .../scala/tools/nsc/settings/ScalaSettings.scala | 1 + 7 files changed, 78 insertions(+), 39 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 9f8476a6fe..408ff9593a 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1898,36 +1898,43 @@ trait Symbols extends api.Symbols { self: SymbolTable => else if (isTerm && (!isParameter || isParamAccessor)) "val" else "" + private case class SymbolKind(accurate: String, sanitized: String, abbreviation: String) + private def symbolKind: SymbolKind = { + val kind = + if (isInstanceOf[FreeVar]) ("free variable", "free variable", "FV") + else if (isPackage) ("package", "package", "PK") + else if (isPackageClass) ("package class", "package", "PKC") + else if (isPackageObject) ("package object", "package", "PKO") + else if (isPackageObjectClass) ("package object class", "package", "PKOC") + else if (isAnonymousClass) ("anonymous class", "anonymous class", "AC") + else if (isRefinementClass) ("refinement class", "", "RC") + else if (isModule) ("module", "object", "MOD") + else if (isModuleClass) ("module class", "object", "MODC") + else if (isGetter) ("getter", if (isSourceMethod) "method" else "value", "GET") + else if (isSetter) ("setter", if (isSourceMethod) "method" else "value", "SET") + else if (isTerm && isLazy) ("lazy value", "lazy value", "LAZ") + else if (isVariable) ("field", "variable", "VAR") + else if (isTrait) ("trait", "trait", "TRT") + else if (isClass) ("class", "class", "CLS") 
+ else if (isType) ("type", "type", "TPE") + else if (isClassConstructor) ("constructor", "constructor", "CTOR") + else if (isSourceMethod) ("method", "method", "METH") + else if (isTerm) ("value", "value", "VAL") + else ("", "", "???") + SymbolKind(kind._1, kind._2, kind._3) + } + /** Accurate string representation of symbols' kind, suitable for developers. */ final def accurateKindString: String = - if (isPackage) "package" - else if (isPackageClass) "package class" - else if (isPackageObject) "package object" - else if (isPackageObjectClass) "package object class" - else if (isRefinementClass) "refinement class" - else if (isModule) "module" - else if (isModuleClass) "module class" - else if (isGetter) "getter" - else if (isSetter) "setter" - else if (isVariable) "field" - else sanitizedKindString + symbolKind.accurate /** String representation of symbol's kind, suitable for the masses. */ private def sanitizedKindString: String = - if (isPackage || isPackageClass) "package" - else if (isModule || isModuleClass) "object" - else if (isAnonymousClass) "anonymous class" - else if (isRefinementClass) "" - else if (isTrait) "trait" - else if (isClass) "class" - else if (isType) "type" - else if (isInstanceOf[FreeVar]) "free variable" - else if (isTerm && isLazy) "lazy value" - else if (isVariable) "variable" - else if (isClassConstructor) "constructor" - else if (isSourceMethod) "method" - else if (isTerm) "value" - else "" + symbolKind.sanitized + + /** String representation of symbol's kind, suitable for the masses. */ + protected[scala] def abbreviatedKindString: String = + symbolKind.abbreviation final def kindString: String = if (settings.debug.value) accurateKindString @@ -1950,12 +1957,25 @@ trait Symbols extends api.Symbols { self: SymbolTable => * If !settings.debug translates expansions of operators back to operator symbol. * E.g. $eq => =. * If settings.uniqid, adds id. + * If settings.Yshowsymkinds, adds abbreviated symbol kind. */ def nameString: String = ( - if (settings.uniqid.value) decodedName + "#" + id - else "" + decodedName + if (!settings.uniqid.value && !settings.Yshowsymkinds.value) "" + decodedName + else if (settings.uniqid.value && !settings.Yshowsymkinds.value) decodedName + "#" + id + else if (!settings.uniqid.value && settings.Yshowsymkinds.value) decodedName + "#" + abbreviatedKindString + else decodedName + "#" + id + "#" + abbreviatedKindString ) + def fullNameString: String = { + def recur(sym: Symbol): String = { + if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) sym.nameString + else if (sym.owner.isEffectiveRoot) sym.nameString + else recur(sym.effectiveOwner.enclClass) + "." + sym.nameString + } + + recur(this) + } + /** If settings.uniqid is set, the symbol's id, else "" */ final def idString = if (settings.uniqid.value) "#"+id else "" diff --git a/src/compiler/scala/reflect/internal/TreePrinters.scala b/src/compiler/scala/reflect/internal/TreePrinters.scala index 3a0717d344..63e4c9f1fa 100644 --- a/src/compiler/scala/reflect/internal/TreePrinters.scala +++ b/src/compiler/scala/reflect/internal/TreePrinters.scala @@ -27,10 +27,20 @@ trait TreePrinters extends api.TreePrinters { self: SymbolTable => /** Turns a path into a String, introducing backquotes * as necessary. 
*/ - def backquotedPath(t: Tree): String = t match { - case Select(qual, name) => "%s.%s".format(backquotedPath(qual), quotedName(name)) - case Ident(name) => quotedName(name) - case _ => t.toString + def backquotedPath(t: Tree): String = { + def suffix(t: Tree) = { + var suffix = "" + if (t.hasSymbol && settings.uniqid.value) suffix += ("#" + t.symbol.id) + if (t.hasSymbol && settings.Yshowsymkinds.value) suffix += ("#" + t.symbol.abbreviatedKindString) + suffix + } + + t match { + case Select(qual, name) if name.isTermName => "%s.%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) + case Select(qual, name) if name.isTypeName => "%s#%s".format(backquotedPath(qual), quotedName(name)) + suffix(t) + case Ident(name) => quotedName(name) + suffix(t) + case _ => t.toString + } } class TreePrinter(out: PrintWriter) extends super.TreePrinter { diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 35d26493f8..fab10f7896 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1205,7 +1205,7 @@ trait Types extends api.Types { self: SymbolTable => if (settings.debug.value) sym.nameString + ".this." else if (sym.isAnonOrRefinementClass) "this." else if (sym.isOmittablePrefix) "" - else if (sym.isModuleClass) sym.fullName + "." + else if (sym.isModuleClass) sym.fullNameString + "." else sym.nameString + ".this." override def safeToString: String = if (sym.isRoot) "" diff --git a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala index 6980d28bfb..0092f73fe3 100644 --- a/src/compiler/scala/reflect/internal/settings/MutableSettings.scala +++ b/src/compiler/scala/reflect/internal/settings/MutableSettings.scala @@ -38,6 +38,7 @@ abstract class MutableSettings extends AbsSettings { def explaintypes: BooleanSetting def verbose: BooleanSetting def uniqid: BooleanSetting + def Yshowsymkinds: BooleanSetting def Xprintpos: BooleanSetting def Yrecursion: IntSetting def maxClassfileName: IntSetting diff --git a/src/compiler/scala/reflect/runtime/Settings.scala b/src/compiler/scala/reflect/runtime/Settings.scala index 2a6cdea519..b4f0123114 100644 --- a/src/compiler/scala/reflect/runtime/Settings.scala +++ b/src/compiler/scala/reflect/runtime/Settings.scala @@ -28,6 +28,7 @@ class Settings extends internal.settings.MutableSettings { val explaintypes = new BooleanSetting(false) val verbose = new BooleanSetting(false) val uniqid = new BooleanSetting(false) + val Yshowsymkinds = new BooleanSetting(false) val Xprintpos = new BooleanSetting(false) val printtypes = new BooleanSetting(false) val Yrecursion = new IntSetting(0) diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 9ab12c6a86..46d890c5d1 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -123,15 +123,21 @@ trait ToolBoxes extends { self: Universe => applyMeth.invoke(result) } } - - def showAttributed(tree: Tree): String = { - val saved = settings.printtypes.value + + def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = { + val saved1 = settings.printtypes.value + val saved2 = settings.uniqid.value + val saved3 = settings.Yshowsymkinds.value try { - settings.printtypes.value = true - //settings.uniqid.value = true + 
settings.printtypes.value = printTypes + settings.uniqid.value = printIds + settings.uniqid.value = printKinds tree.toString - } finally - compiler.settings.printtypes.value = saved + } finally { + settings.printtypes.value = saved1 + settings.uniqid.value = saved2 + settings.Yshowsymkinds.value = saved3 + } } } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 6806ca03ba..107ffc35c6 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -140,6 +140,7 @@ trait ScalaSettings extends AbsScalaSettings val selfInAnnots = BooleanSetting ("-Yself-in-annots", "Include a \"self\" identifier inside of annotations.") val Xshowtrees = BooleanSetting ("-Yshow-trees", "(Requires -Xprint:) Print detailed ASTs.") val Yshowsyms = BooleanSetting ("-Yshow-syms", "Print the AST symbol hierarchy after each phase.") + val Yshowsymkinds = BooleanSetting ("-Yshow-symkinds", "Print abbreviated symbol kinds next to symbol names.") val skip = PhasesSetting ("-Yskip", "Skip") val Ygenjavap = StringSetting ("-Ygen-javap", "dir", "Generate a parallel output directory of .javap files.", "") val Ydumpclasses = StringSetting ("-Ydump-classes", "dir", "Dump the generated bytecode to .class files (useful for reflective compilation that utilizes in-memory classloaders).", "") -- cgit v1.2.3 From 1c963535daf42a636030f6e905c7c4529744e0c3 Mon Sep 17 00:00:00 2001 From: Lucien Pereira Date: Sat, 28 Jan 2012 14:13:17 +0100 Subject: Use of polymorphic dispatch instead of pattern matching. Use a specialized iterator. --- src/library/scala/collection/mutable/AVLTree.scala | 281 ++++++++++++--------- src/library/scala/collection/mutable/TreeSet.scala | 14 +- 2 files changed, 168 insertions(+), 127 deletions(-) diff --git a/src/library/scala/collection/mutable/AVLTree.scala b/src/library/scala/collection/mutable/AVLTree.scala index f0a6c690b6..ba2af8f120 100644 --- a/src/library/scala/collection/mutable/AVLTree.scala +++ b/src/library/scala/collection/mutable/AVLTree.scala @@ -21,180 +21,221 @@ private[mutable] sealed trait AVLTree[+A] extends Serializable { def depth: Int -} + def iterator[B >: A]: Iterator[B] = Iterator.empty -private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] { - override val balance: Int = right.depth - left.depth + def contains[B >: A](value: B, ordering: Ordering[B]): Boolean = false - override val depth: Int = math.max(left.depth, right.depth) + 1 + /** + * Returns a new tree containing the given element. + * Thows an IllegalArgumentException if element is already present. + * + */ + def insert[B >: A](value: B, ordering: Ordering[B]): AVLTree[B] = Node(value, Leaf, Leaf) + + /** + * Return a new tree which not contains given element. + * + */ + def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = + throw new NoSuchElementException(String.valueOf(value)) + + /** + * Return a tuple containing the smallest element of the provided tree + * and a new tree from which this element has been extracted. + * + */ + def removeMin[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.") + + /** + * Return a tuple containing the biggest element of the provided tree + * and a new tree from which this element has been extracted. 
+ * + */ + def removeMax[B >: A]: (B, AVLTree[B]) = sys.error("Should not happen.") + + def rebalance[B >: A]: AVLTree[B] = this + + def leftRotation[B >: A]: Node[B] = sys.error("Should not happen.") + + def rightRotation[B >: A]: Node[B] = sys.error("Should not happen.") + + def doubleLeftRotation[B >: A]: Node[B] = sys.error("Should not happen.") + def doubleRightRotation[B >: A]: Node[B] = sys.error("Should not happen.") } private case object Leaf extends AVLTree[Nothing] { override val balance: Int = 0 override val depth: Int = -1 - } -private[mutable] object AVLTree { +private case class Node[A](val data: A, val left: AVLTree[A], val right: AVLTree[A]) extends AVLTree[A] { + override val balance: Int = right.depth - left.depth + + override val depth: Int = math.max(left.depth, right.depth) + 1 + + override def iterator[B >: A]: Iterator[B] = new AVLIterator(this) + + override def contains[B >: A](value: B, ordering: Ordering[B]) = { + val ord = ordering.compare(value, data) + if (0 == ord) + true + else if (ord < 0) + left.contains(value, ordering) + else + right.contains(value, ordering) + } /** * Returns a new tree containing the given element. * Thows an IllegalArgumentException if element is already present. * */ - def insert[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): AVLTree[A] = tree match { - case Leaf => Node(value, Leaf, Leaf) - - case Node(a, left, right) => if (0 == ordering.compare(value, a)) { + override def insert[B >: A](value: B, ordering: Ordering[B]) = { + val ord = ordering.compare(value, data) + if (0 == ord) throw new IllegalArgumentException() - } else if (-1 == ordering.compare(value, a)) { - rebalance(Node(a, insert(value, left, ordering), right)) - } else { - rebalance(Node(a, left, insert(value, right, ordering))) - } - } - - def contains[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): Boolean = tree match { - case Leaf => false - - case Node(a, left, right) => if (0 == ordering.compare(value, a)) { - true - } else if (-1 == ordering.compare(value, a)) { - contains(value, left, ordering) - } else { - contains(value, right, ordering) - } + else if (ord < 0) + Node(data, left.insert(value, ordering), right).rebalance + else + Node(data, left, right.insert(value, ordering)).rebalance } /** * Return a new tree which not contains given element. 
* */ - def remove[A](value: A, tree: AVLTree[A], ordering: Ordering[A]): AVLTree[A] = tree match { - case Leaf => throw new NoSuchElementException() - - case Node(a, Leaf, Leaf) => if (0 == ordering.compare(value, a)) { - Leaf - } else { - throw new NoSuchElementException() - } - - case Node(a, left, right@Node(_, _, _)) => if (0 == ordering.compare(value, a)) { - val (min, newRight) = removeMin(right) - rebalance(Node(min, left, newRight)) - } else if (-1 == ordering.compare(value, a)) { - rebalance(Node(a, remove(value, left, ordering), right)) - } else { - rebalance(Node(a, left, remove(value, right, ordering))) - } - - case Node(a, left: Node[A], right) => if (0 == ordering.compare(value, a)) { - val (max, newLeft) = removeMax(left) - rebalance(Node(max, newLeft, right)) - } else if (-1 == ordering.compare(value, a)) { - rebalance(Node(a, remove(value, left, ordering), right)) + override def remove[B >: A](value: B, ordering: Ordering[B]): AVLTree[A] = { + val ord = ordering.compare(value, data) + if(ord == 0) { + if (Leaf == left) { + if (Leaf == right) { + Leaf + } else { + val (min, newRight) = right.removeMin + Node(min, left, newRight).rebalance + } + } else { + val (max, newLeft) = left.removeMax + Node(max, newLeft, right).rebalance + } + } else if (ord < 0) { + Node(data, left.remove(value, ordering), right).rebalance } else { - rebalance(Node(a, left, remove(value, right, ordering))) + Node(data, left, right.remove(value, ordering)).rebalance } } /** - * Return a tuple containing the biggest element of the provided tree + * Return a tuple containing the smallest element of the provided tree * and a new tree from which this element has been extracted. * */ - def removeMax[A](tree: AVLTree[A]): (A, AVLTree[A]) = tree match { - case Node(a, Leaf, Leaf) => (a, Leaf) - - case Node(a, left, Leaf) => (a, left) - - case Node(a, left, right) => { - val (max, newRight) = removeMax(right) - (max, rebalance(Node(a, left, newRight))) + override def removeMin[B >: A]: (B, AVLTree[B]) = { + if (Leaf == left) + (data, right) + else { + val (min, newLeft) = left.removeMin + (min, Node(data, newLeft, right).rebalance) } - - case Leaf => sys.error("Should not happen.") } /** - * Return a tuple containing the smallest element of the provided tree + * Return a tuple containing the biggest element of the provided tree * and a new tree from which this element has been extracted. * */ - def removeMin[A](tree: AVLTree[A]): (A, AVLTree[A]) = tree match { - case Node(a, Leaf, Leaf) => (a, Leaf) - - case Node(a, Leaf, right) => (a, right) - - case Node(a, left, right) => { - val (min, newLeft) = removeMin(left) - (min, rebalance(Node(a, newLeft, right))) + override def removeMax[B >: A]: (B, AVLTree[B]) = { + if (Leaf == right) + (data, left) + else { + val (max, newRight) = right.removeMax + (max, Node(data, left, newRight).rebalance) } - - case Leaf => sys.error("Should not happen.") } - - /** - * Returns a bounded stream of elements in the tree. 
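// Worked example for the rebalance and rotation overrides that follow (illustrative,
// assuming ascending insertion of 1, 2, 3 with ord = Ordering.Int): the third insert
// produces Node(1, Leaf, Node(2, Leaf, Node(3, Leaf, Leaf))), whose root balance is +2
// while the right child's balance is +1, so rebalance applies a single leftRotation:
//   Leaf.insert(1, ord).insert(2, ord).insert(3, ord) == Node(2, Node(1, Leaf, Leaf), Node(3, Leaf, Leaf))
// The double rotations cover the zig-zag cases, where the taller child leans the other way.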
- * - */ - def toStream[A](tree: AVLTree[A], isLeftAcceptable: A => Boolean, isRightAcceptable: A => Boolean): Stream[A] = tree match { - case Leaf => Stream.empty - - case Node(a, left, right) => if (isLeftAcceptable(a)) { - if (isRightAcceptable(a)) { - toStream(left, isLeftAcceptable, isRightAcceptable) ++ Stream(a) ++ toStream(right, isLeftAcceptable, isRightAcceptable) - } else { - toStream(left, isLeftAcceptable, isRightAcceptable) - } - } else if (isRightAcceptable(a)) { - toStream(right, isLeftAcceptable, isRightAcceptable) + + override def rebalance[B >: A] = { + if (-2 == balance) { + if (1 == left.balance) + doubleRightRotation + else + rightRotation + } else if (2 == balance) { + if (-1 == right.balance) + doubleLeftRotation + else + leftRotation } else { - Stream.empty + this } } - /** - * Returns a bounded iterator of elements in the tree. - * - */ - def iterator[A](tree: AVLTree[A], isLeftAcceptable: A => Boolean, isRightAcceptable: A => Boolean): Iterator[A] = - toStream(tree, isLeftAcceptable, isRightAcceptable).iterator - - def rebalance[A](tree: AVLTree[A]): AVLTree[A] = (tree, tree.balance) match { - case (node@Node(_, left, _), -2) => left.balance match { - case 1 => doubleRightRotation(node) - case _ => rightRotation(node) - } - - case (node@Node(_, _, right), 2) => right.balance match { - case -1 => doubleLeftRotation(node) - case _ => leftRotation(node) - } + override def leftRotation[B >: A] = { + if (Leaf != right) { + val r: Node[A] = right.asInstanceOf[Node[A]] + Node(r.data, Node(data, left, r.left), r.right) + } else sys.error("Should not happen.") + } - case _ => tree + override def rightRotation[B >: A] = { + if (Leaf != left) { + val l: Node[A] = left.asInstanceOf[Node[A]] + Node(l.data, l.left, Node(data, l.right, right)) + } else sys.error("Should not happen.") } - def leftRotation[A](tree: Node[A]): AVLTree[A] = tree.right match { - case Node(b, left, right) => Node(b, Node(tree.data, tree.left, left), right) - case _ => sys.error("Should not happen.") + override def doubleLeftRotation[B >: A] = { + if (Leaf != right) { + val r: Node[A] = right.asInstanceOf[Node[A]] + // Let's save an instanceOf by 'inlining' the left rotation + val rightRotated = r.rightRotation + Node(rightRotated.data, Node(data, left, rightRotated.left), rightRotated.right) + } else sys.error("Should not happen.") } - def rightRotation[A](tree: Node[A]): AVLTree[A] = tree.left match { - case Node(b, left, right) => Node(b, left, Node(tree.data, right, tree.right)) - case _ => sys.error("Should not happen.") + override def doubleRightRotation[B >: A] = { + if (Leaf != left) { + val l: Node[A] = left.asInstanceOf[Node[A]] + // Let's save an instanceOf by 'inlining' the right rotation + val leftRotated = l.leftRotation + Node(leftRotated.data, leftRotated.left, Node(data, leftRotated.right, right)) + } else sys.error("Should not happen.") } +} + +private class AVLIterator[A](root: Node[A]) extends Iterator[A] { + val stack = mutable.ArrayStack[Node[A]](root) + diveLeft() - def doubleLeftRotation[A](tree: Node[A]): AVLTree[A] = tree.right match { - case right@Node(b, l, r) => leftRotation(Node(tree.data, tree.left, rightRotation(right))) - case _ => sys.error("Should not happen.") + private def diveLeft(): Unit = { + if (Leaf != stack.head.left) { + val left: Node[A] = stack.head.left.asInstanceOf[Node[A]] + stack.push(left) + diveLeft() + } } - def doubleRightRotation[A](tree: Node[A]): AVLTree[A] = tree.left match { - case left@Node(b, l, r) => rightRotation(Node(tree.data, 
leftRotation(left), tree.right)) - case _ => sys.error("Should not happen.") + private def engageRight(): Unit = { + if (Leaf != stack.head.right) { + val right: Node[A] = stack.head.right.asInstanceOf[Node[A]] + stack.pop + stack.push(right) + diveLeft() + } else + stack.pop } + override def hasNext: Boolean = !stack.isEmpty + + override def next(): A = { + if (stack.isEmpty) + throw new NoSuchElementException() + else { + val result = stack.head.data + // Let's maintain stack for the next invocation + engageRight() + result + } + } } diff --git a/src/library/scala/collection/mutable/TreeSet.scala b/src/library/scala/collection/mutable/TreeSet.scala index 56b4b349cf..e0f1c3adfe 100644 --- a/src/library/scala/collection/mutable/TreeSet.scala +++ b/src/library/scala/collection/mutable/TreeSet.scala @@ -79,7 +79,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S override def -=(elem: A): this.type = { try { - resolve.avl = AVLTree.remove(elem, resolve.avl, ordering) + resolve.avl = resolve.avl.remove(elem, ordering) resolve.cardinality = resolve.cardinality - 1 } catch { case e: NoSuchElementException => () @@ -89,7 +89,7 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S override def +=(elem: A): this.type = { try { - resolve.avl = AVLTree.insert(elem, resolve.avl, ordering) + resolve.avl = resolve.avl.insert(elem, ordering) resolve.cardinality = resolve.cardinality + 1 } catch { case e: IllegalArgumentException => () @@ -113,11 +113,11 @@ class TreeSet[A](implicit val ordering: Ordering[A]) extends SortedSet[A] with S override def contains(elem: A): Boolean = { isLeftAcceptable(from, ordering)(elem) && isRightAcceptable(until, ordering)(elem) && - AVLTree.contains(elem, resolve.avl, ordering) + resolve.avl.contains(elem, ordering) } - override def iterator: Iterator[A] = - AVLTree.iterator(resolve.avl, - isLeftAcceptable(from, ordering), - isRightAcceptable(until, ordering)) + override def iterator: Iterator[A] = resolve.avl.iterator + .dropWhile(e => !isLeftAcceptable(from, ordering)(e)) + .takeWhile(e => isRightAcceptable(until, ordering)(e)) + } -- cgit v1.2.3 From 2616dd6348a87d3d9cd306d3ab76d039c15d10c4 Mon Sep 17 00:00:00 2001 From: Lucien Pereira Date: Sat, 28 Jan 2012 14:43:45 +0100 Subject: Added benchmarking files, in order to easily compare various implementations performances. 
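Each file below follows the same scala.testing.Benchmark pattern: a Dummy element type, one
class per contender (java.util.TreeSet, the AVL-backed mutable.TreeSet, the red-black
immutable.TreeSet), and a driver object that runs them with a fixed element count. A rough
sketch of that shared shape, under the assumption that Benchmark.main treats its first
command-line argument as the number of timed runs:

    class ImmutableInsertBench(val length: Int) extends testing.Benchmark {
      val data: Array[Int] = (0 until length).toArray
      def run = {
        var t = collection.immutable.TreeSet[Int]()
        var i = 0
        while (i < length) { t += data(i); i += 1 }   // timed body, re-run per measurement
      }
    }
    object Driver {                                   // hypothetical driver name
      def main(args: Array[String]): Unit = new ImmutableInsertBench(500000).main(args)
    }
    // e.g.  scala Driver 10   -- ten timed runs, one line of timings printed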
--- test/benchmarking/TreeSetInsert.scala | 65 ++++++++++++++++++++++++++++ test/benchmarking/TreeSetInsertRandom.scala | 65 ++++++++++++++++++++++++++++ test/benchmarking/TreeSetIterator.scala | 66 +++++++++++++++++++++++++++++ test/benchmarking/TreeSetRemove.scala | 66 +++++++++++++++++++++++++++++ test/benchmarking/TreeSetRemoveRandom.scala | 66 +++++++++++++++++++++++++++++ 5 files changed, 328 insertions(+) create mode 100644 test/benchmarking/TreeSetInsert.scala create mode 100644 test/benchmarking/TreeSetInsertRandom.scala create mode 100644 test/benchmarking/TreeSetIterator.scala create mode 100644 test/benchmarking/TreeSetRemove.scala create mode 100644 test/benchmarking/TreeSetRemoveRandom.scala diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala new file mode 100644 index 0000000000..61b064ae33 --- /dev/null +++ b/test/benchmarking/TreeSetInsert.scala @@ -0,0 +1,65 @@ + +object TreeSetInsert { + + def main(args: Array[String]): Unit = { + val n = 500000 + new JavaUtilTS(n).main(args) + new MutableTS(n).main(args) + new ImmutableTS(n).main(args) + } +} + +class Dummy(val a: Int) extends math.Ordered[Dummy] { + def compare(other: Dummy) = this.a - other.a + + override def toString = a.toString + } + + +class JavaUtilTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: java.util.TreeSet[Dummy] = null + + def run = { + t = new java.util.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t add elem + i += 1 + } + } +} + +class MutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.mutable.TreeSet[Dummy] = null + + def run = { + t = collection.mutable.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t += elem + i += 1 + } + } +} + +class ImmutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.immutable.TreeSet[Dummy] = null + + def run = { + t = collection.immutable.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t += elem + i += 1 + } + } +} diff --git a/test/benchmarking/TreeSetInsertRandom.scala b/test/benchmarking/TreeSetInsertRandom.scala new file mode 100644 index 0000000000..7f182548b7 --- /dev/null +++ b/test/benchmarking/TreeSetInsertRandom.scala @@ -0,0 +1,65 @@ + +object TreeSetInsertRandom { + + def main(args: Array[String]): Unit = { + val n = 500000 + new JavaUtilTS(n).main(args) + new MutableTS(n).main(args) + new ImmutableTS(n).main(args) + } +} + +class Dummy(val a: Int) extends math.Ordered[Dummy] { + def compare(other: Dummy) = this.a - other.a + + override def toString = a.toString + } + + +class JavaUtilTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: java.util.TreeSet[Dummy] = null + + def run = { + t = new java.util.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t add elem + i += 1 + } + } +} + +class MutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: collection.mutable.TreeSet[Dummy] = null + + def run = { + t = collection.mutable.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t += elem + i += 
1 + } + } +} + +class ImmutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: collection.immutable.TreeSet[Dummy] = null + + def run = { + t = collection.immutable.TreeSet[Dummy]() + + var i = 0 + while (i < length) { + val elem = data(i) + t += elem + i += 1 + } + } +} diff --git a/test/benchmarking/TreeSetIterator.scala b/test/benchmarking/TreeSetIterator.scala new file mode 100644 index 0000000000..c3b19aa29f --- /dev/null +++ b/test/benchmarking/TreeSetIterator.scala @@ -0,0 +1,66 @@ + +object TreeSetIterator { + + def main(args: Array[String]): Unit = { + val n = 500000 + new JavaUtilTS(n).main(args) + new MutableTS(n).main(args) + new ImmutableTS(n).main(args) + } +} + +class Dummy(val a: Int) extends math.Ordered[Dummy] { + def compare(other: Dummy) = this.a - other.a + + override def toString = a.toString + } + + +class JavaUtilTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: java.util.TreeSet[Dummy] = null + + def run = { + t = new java.util.TreeSet[Dummy]() + data foreach { a => t add a } + + var i: Dummy = null + var it = t.iterator + while (it.hasNext) { + i = it.next + } + i + } +} + +class MutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.mutable.TreeSet[Dummy] = null + + def run = { + t = collection.mutable.TreeSet[Dummy](data: _*) + + var i: Dummy = null + var it = t.iterator + while (it.hasNext) { + i = it.next + } + i + } +} + +class ImmutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.immutable.TreeSet[Dummy] = null + + def run = { + t = collection.immutable.TreeSet[Dummy](data: _*) + + var i: Dummy = null + var it = t.iterator + while (it.hasNext) { + i = it.next + } + i + } +} diff --git a/test/benchmarking/TreeSetRemove.scala b/test/benchmarking/TreeSetRemove.scala new file mode 100644 index 0000000000..68c07ce70a --- /dev/null +++ b/test/benchmarking/TreeSetRemove.scala @@ -0,0 +1,66 @@ + +object TreeSetRemove { + + def main(args: Array[String]): Unit = { + val n = 500000 + new JavaUtilTS(n).main(args) + new MutableTS(n).main(args) + new ImmutableTS(n).main(args) + } +} + +class Dummy(val a: Int) extends math.Ordered[Dummy] { + def compare(other: Dummy) = this.a - other.a + + override def toString = a.toString + } + + +class JavaUtilTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: java.util.TreeSet[Dummy] = null + + def run = { + t = new java.util.TreeSet[Dummy]() + data foreach { a => t add a } + + var i = 0 + while (i < length) { + val elem = data(i) + t remove elem + i += 1 + } + } +} + +class MutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.mutable.TreeSet[Dummy] = null + + def run = { + t = collection.mutable.TreeSet[Dummy](data: _*) + + var i = 0 + while (i < length) { + val elem = data(i) + t -= elem + i += 1 + } + } +} + +class ImmutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray + var t: collection.immutable.TreeSet[Dummy] = null + + def run = { + t = collection.immutable.TreeSet[Dummy](data: _*) + 
+ var i = 0 + while (i < length) { + val elem = data(i) + t -= elem + i += 1 + } + } +} diff --git a/test/benchmarking/TreeSetRemoveRandom.scala b/test/benchmarking/TreeSetRemoveRandom.scala new file mode 100644 index 0000000000..4d311679e3 --- /dev/null +++ b/test/benchmarking/TreeSetRemoveRandom.scala @@ -0,0 +1,66 @@ + +object TreeSetRemoveRandom { + + def main(args: Array[String]): Unit = { + val n = 500000 + new JavaUtilTS(n).main(args) + new MutableTS(n).main(args) + new ImmutableTS(n).main(args) + } +} + +class Dummy(val a: Int) extends math.Ordered[Dummy] { + def compare(other: Dummy) = this.a - other.a + + override def toString = a.toString + } + + +class JavaUtilTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: java.util.TreeSet[Dummy] = null + + def run = { + t = new java.util.TreeSet[Dummy]() + data foreach { a => t add a } + + var i = 0 + while (i < length) { + val elem = data(i) + t remove elem + i += 1 + } + } +} + +class MutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: collection.mutable.TreeSet[Dummy] = null + + def run = { + t = collection.mutable.TreeSet[Dummy](data: _*) + + var i = 0 + while (i < length) { + val elem = data(i) + t -= elem + i += 1 + } + } +} + +class ImmutableTS(val length: Int) extends testing.Benchmark { + var data: Array[Dummy] = util.Random.shuffle((0 until length) map { a => new Dummy(a) }) toArray + var t: collection.immutable.TreeSet[Dummy] = null + + def run = { + t = collection.immutable.TreeSet[Dummy](data: _*) + + var i = 0 + while (i < length) { + val elem = data(i) + t -= elem + i += 1 + } + } +} -- cgit v1.2.3 From 06945b6dcfc7bbb0efb5f8429ffeab7fbde9be5b Mon Sep 17 00:00:00 2001 From: Lucien Pereira Date: Sat, 28 Jan 2012 14:52:01 +0100 Subject: Scalacheck test in order to ensure AVL invariants are respected. 
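The property under test is the AVL invariant itself: at every Node, balance (defined in
AVLTree.scala as right.depth - left.depth) stays within [-1, 1], and it must still hold
after an insert or a remove. The generator enumerates every balanced shape for a sorted,
distinct element list; per the pattern matches in makeAllBalancedTree below, for example:

    makeAllBalancedTree(List(1, 2))     // two shapes: Node(2, Node(1, Leaf, Leaf), Leaf)
                                        //             Node(1, Leaf, Node(2, Leaf, Leaf))
    makeAllBalancedTree(List(1, 2, 3))  // one shape:  Node(2, Node(1, Leaf, Leaf), Node(3, Leaf, Leaf))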
--- test/files/scalacheck/avl.scala | 114 ++++++++++++++++++++++++++++++++++++++++ 1 file changed, 114 insertions(+) create mode 100644 test/files/scalacheck/avl.scala diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala new file mode 100644 index 0000000000..51fb1fe8c3 --- /dev/null +++ b/test/files/scalacheck/avl.scala @@ -0,0 +1,114 @@ +import org.scalacheck.Gen +import org.scalacheck.Prop.forAll +import org.scalacheck.Properties + +import util.logging.ConsoleLogger + +package scala.collection.mutable { + + /** + * Property of an AVL Tree : Any node of the tree has a balance value beetween in [-1; 1] + */ + abstract class AVLTreeTest(name: String) extends Properties(name) with ConsoleLogger { + + def `2^`(n: Int) = (1 to n).fold(1)((a, b) => b*2) + + def capacityMax(depth: Int): Int = `2^`(depth+1) - 1 + + def minDepthForCapacity(x: Int): Int = { + var depth = 0 + while(capacityMax(depth) < x) + depth += 1 + depth + } + + def numberOfElementsInLeftSubTree(n: Int): collection.immutable.IndexedSeq[Int] = { + val mid = n/2 + n%2 + ((1 until mid) + .filter { i => math.abs(minDepthForCapacity(i) - minDepthForCapacity(n-i)) < 2 } + .flatMap { i => Seq(i, n-(i+1)) }).toIndexedSeq.distinct + } + + def makeAllBalancedTree[A](elements: List[A]): List[AVLTree[A]] = elements match { + case Nil => Leaf::Nil + case first::Nil => Node(first, Leaf, Leaf)::Nil + case first::second::Nil => Node(second, Node(first, Leaf, Leaf), Leaf)::Node(first, Leaf, Node(second, Leaf, Leaf))::Nil + case first::second::third::Nil => Node(second, Node(first, Leaf, Leaf), Node(third, Leaf, Leaf))::Nil + case _ => { + val combinations = for { + left <- numberOfElementsInLeftSubTree(elements.size) + root = elements(left) + right = elements.size - (left + 1) + } yield (root, left, right) + (combinations.flatMap(triple => for { + l <- makeAllBalancedTree(elements.take(triple._2)) + r <- makeAllBalancedTree(elements.takeRight(triple._3)) + } yield Node(triple._1, l, r))).toList + } + } + + def genInput: Gen[(Int, List[AVLTree[Int]])] = for { + size <- Gen.choose(20, 25) + elements <- Gen.listOfN(size, Gen.choose(0, 1000)) + selected <- Gen.choose(0, 1000) + } yield { + // selected mustn't be in elements already + val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2)) + (selected*2+1, list) + } + + def genInputDelete: Gen[(Int, List[AVLTree[Int]])] = for { + size <- Gen.choose(20, 25) + elements <- Gen.listOfN(size, Gen.choose(0, 1000)) + e = elements.sorted.distinct + selected <- Gen.choose(0, e.size-1) + } yield { + // selected must be in elements already + val list = makeAllBalancedTree(e) + (e(selected), list) + } + } + + trait AVLInvariants { + self: AVLTreeTest => + + def isBalanced[A](t: AVLTree[A]): Boolean = t match { + case node: Node[A] => math.abs(node.balance) < 2 && (List(node.left, node.right) forall isBalanced) + case Leaf => true + } + + def setup(invariant: AVLTree[Int] => Boolean) = forAll(genInput) { + case (selected: Int, trees: List[AVLTree[Int]]) => + trees.map(tree => invariant(tree)).fold(true)((a, b) => a && b) + } + + property("Every tree is initially balanced.") = setup(isBalanced) + } + + object TestInsert extends AVLTreeTest("Insert") with AVLInvariants { + import math.Ordering.Int + property("`insert` creates a new tree containing the given element. 
The tree remains balanced.") = forAll(genInput) { + case (selected: Int, trees: List[AVLTree[Int]]) => + trees.map(tree => { + val modifiedTree = tree.insert(selected, Int) + modifiedTree.contains(selected, Int) && isBalanced(modifiedTree) + }).fold(true)((a, b) => a && b) + } + } + + object TestRemove extends AVLTreeTest("Remove") with AVLInvariants { + import math.Ordering.Int + property("`remove` creates a new tree without the given element. The tree remains balanced.") = forAll(genInputDelete) { + case (selected: Int, trees: List[AVLTree[Int]]) => + trees.map(tree => { + val modifiedTree = tree.remove(selected, Int) + tree.contains(selected, Int) && !modifiedTree.contains(selected, Int) && isBalanced(modifiedTree) + }).fold(true)((a, b) => a && b) + } + } +} + +object Test extends Properties("AVL") { + include(scala.collection.mutable.TestInsert) + include(scala.collection.mutable.TestRemove) +} \ No newline at end of file -- cgit v1.2.3 From 92a358aebae6336e4a2df54e5446f43efac71b21 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 28 Jan 2012 12:24:30 -0800 Subject: Cleaned up polymorphic method creation. I love the smell of polymorphic method synthesis in the early afternoon. --- .../scala/reflect/internal/Definitions.scala | 115 +++++++++------------ src/compiler/scala/reflect/internal/StdNames.scala | 2 + src/compiler/scala/reflect/internal/Symbols.scala | 4 + 3 files changed, 54 insertions(+), 67 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 1490d80d7a..19726b694d 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -14,6 +14,14 @@ import PartialFunction._ trait Definitions extends reflect.api.StandardDefinitions { self: SymbolTable => + /** Since both the value parameter types and the result type may + * require access to the type parameter symbols, we model polymorphic + * creation as a function from those symbols to (formal types, result type). + * The Option is to distinguish between nullary methods and empty-param-list + * methods. 
+ */ + private type PolyMethodCreator = List[Symbol] => (Option[List[Type]], Type) + private def newClass(owner: Symbol, name: TypeName, parents: List[Type], flags: Long = 0L): Symbol = { val clazz = owner.newClassSymbol(name, NoPosition, flags) clazz setInfoAndEnter ClassInfoType(parents, newScope, clazz) @@ -311,17 +319,10 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val RemoteInterfaceClass = getRequiredClass("java.rmi.Remote") lazy val RemoteExceptionClass = getRequiredClass("java.rmi.RemoteException") - lazy val RepeatedParamClass = newCovariantPolyClass( - ScalaPackageClass, - tpnme.REPEATED_PARAM_CLASS_NAME, - tparam => seqType(tparam.typeConstructor) - ) - - lazy val JavaRepeatedParamClass = newCovariantPolyClass( - ScalaPackageClass, - tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, - tparam => arrayType(tparam.typeConstructor) - ) + lazy val ByNameParamClass = specialPolyClass(tpnme.BYNAME_PARAM_CLASS_NAME, COVARIANT)(_ => AnyClass.typeConstructor) + lazy val EqualsPatternClass = specialPolyClass(tpnme.EQUALS_PATTERN_NAME, 0L)(_ => AnyClass.typeConstructor) + lazy val JavaRepeatedParamClass = specialPolyClass(tpnme.JAVA_REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => arrayType(tparam.typeConstructor)) + lazy val RepeatedParamClass = specialPolyClass(tpnme.REPEATED_PARAM_CLASS_NAME, COVARIANT)(tparam => seqType(tparam.typeConstructor)) def isByNameParamType(tp: Type) = tp.typeSymbol == ByNameParamClass def isScalaRepeatedParamType(tp: Type) = tp.typeSymbol == RepeatedParamClass @@ -350,15 +351,6 @@ trait Definitions extends reflect.api.StandardDefinitions { case _ => false } - lazy val ByNameParamClass = newCovariantPolyClass( - ScalaPackageClass, - tpnme.BYNAME_PARAM_CLASS_NAME, - tparam => AnyClass.typeConstructor - ) - lazy val EqualsPatternClass = { - val clazz = newClass(ScalaPackageClass, tpnme.EQUALS_PATTERN_NAME, Nil) - clazz setInfo polyType(List(newTypeParam(clazz, 0)), ClassInfoType(anyparam, newScope, clazz)) - } lazy val MatchingStrategyClass = getRequiredClass("scala.MatchingStrategy") // collections classes @@ -637,8 +629,8 @@ trait Definitions extends reflect.api.StandardDefinitions { } // members of class scala.Any - lazy val Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) - lazy val Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL) + lazy val Any_== = newMethod(AnyClass, nme.EQ, anyparam, booltype, FINAL) + lazy val Any_!= = newMethod(AnyClass, nme.NE, anyparam, booltype, FINAL) lazy val Any_equals = newMethod(AnyClass, nme.equals_, anyparam, booltype) lazy val Any_hashCode = newMethod(AnyClass, nme.hashCode_, Nil, inttype) lazy val Any_toString = newMethod(AnyClass, nme.toString_, Nil, stringtype) @@ -653,12 +645,9 @@ trait Definitions extends reflect.api.StandardDefinitions { // Since getClass is not actually a polymorphic method, this requires compiler // participation. At the "Any" level, the return type is Class[_] as it is in // java.lang.Object. Java also special cases the return type. 
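// The PolyMethodCreator convention above, spelled out (the signature renderings are
// approximate and not taken from the patch):
//   tps => (None, restpe)             creates a nullary method      def m[T0]: restpe
//   tps => (Some(Nil), restpe)        creates an empty-paren one    def m[T0](): restpe
//   tps => (Some(List(tp)), restpe)   creates                       def m[T0](x$1: tp): restpe
// so Object_synchronized below comes out roughly as  final def synchronized[T0](x$1: T0): T0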
- lazy val Any_getClass = - newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED) - lazy val Any_isInstanceOf = newPolyMethod( - AnyClass, nme.isInstanceOf_, tparam => NullaryMethodType(booltype)) setFlag FINAL - lazy val Any_asInstanceOf = newPolyMethod( - AnyClass, nme.asInstanceOf_, tparam => NullaryMethodType(tparam.typeConstructor)) setFlag FINAL + lazy val Any_getClass = newMethod(AnyClass, nme.getClass_, Nil, getMember(ObjectClass, nme.getClass_).tpe.resultType, DEFERRED) + lazy val Any_isInstanceOf = newT1NullaryMethod(AnyClass, nme.isInstanceOf_, FINAL)(_ => booltype) + lazy val Any_asInstanceOf = newT1NullaryMethod(AnyClass, nme.asInstanceOf_, FINAL)(_.typeConstructor) // members of class java.lang.{ Object, String } lazy val Object_## = newMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL) @@ -666,15 +655,11 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val Object_!= = newMethod(ObjectClass, nme.NE, anyrefparam, booltype, FINAL) lazy val Object_eq = newMethod(ObjectClass, nme.eq, anyrefparam, booltype, FINAL) lazy val Object_ne = newMethod(ObjectClass, nme.ne, anyrefparam, booltype, FINAL) - lazy val Object_synchronized = newPolyMethodCon( - ObjectClass, nme.synchronized_, - tparam => msym => MethodType(msym.newSyntheticValueParams(List(tparam.typeConstructor)), tparam.typeConstructor)) setFlag FINAL - lazy val Object_isInstanceOf = newPolyMethod( - ObjectClass, newTermName("$isInstanceOf"), - tparam => MethodType(List(), booltype)) setFlag (FINAL | SYNTHETIC) - lazy val Object_asInstanceOf = newPolyMethod( - ObjectClass, newTermName("$asInstanceOf"), - tparam => MethodType(List(), tparam.typeConstructor)) setFlag (FINAL | SYNTHETIC) + lazy val Object_isInstanceOf = newT1NoParamsMethod(ObjectClass, nme.isInstanceOf_Ob, FINAL | SYNTHETIC)(_ => booltype) + lazy val Object_asInstanceOf = newT1NoParamsMethod(ObjectClass, nme.asInstanceOf_Ob, FINAL | SYNTHETIC)(_.typeConstructor) + lazy val Object_synchronized = newPolyMethod(1, ObjectClass, nme.synchronized_, FINAL)(tps => + (Some(List(tps.head.typeConstructor)), tps.head.typeConstructor) + ) lazy val String_+ = newMethod(StringClass, nme.raw.PLUS, anyparam, stringtype, FINAL) def Object_getClass = getMember(ObjectClass, nme.getClass_) @@ -686,7 +671,6 @@ trait Definitions extends reflect.api.StandardDefinitions { def Object_hashCode = getMember(ObjectClass, nme.hashCode_) def Object_toString = getMember(ObjectClass, nme.toString_) - // boxed classes lazy val ObjectRefClass = getRequiredClass("scala.runtime.ObjectRef") lazy val VolatileObjectRefClass = getRequiredClass("scala.runtime.VolatileObjectRef") @@ -831,39 +815,36 @@ trait Definitions extends reflect.api.StandardDefinitions { */ private def getModuleOrClass(path: Name): Symbol = getModuleOrClass(path, path.length) - private def newCovariantPolyClass(owner: Symbol, name: TypeName, parent: Symbol => Type): Symbol = { - val clazz = newClass(owner, name, List()) - val tparam = newTypeParam(clazz, 0) setFlag COVARIANT - val p = parent(tparam) -/* p.typeSymbol.initialize - println(p.typeSymbol + " flags: " + Flags.flagsToString(p.typeSymbol.flags)) - val parents = /*if (p.typeSymbol.isTrait) - List(definitions.AnyRefClass.tpe, p) - else*/ List(p) - println("creating " + name + " with parents " + parents) */ - clazz.setInfo( - polyType( - List(tparam), - ClassInfoType(List(AnyRefClass.tpe, p), newScope, clazz))) - } - private def newAlias(owner: Symbol, name: TypeName, alias: Type): Symbol = 
owner.newAliasType(name) setInfoAndEnter alias - - /** tcon receives the type parameter symbol as argument */ - private def newPolyMethod(owner: Symbol, name: TermName, tcon: Symbol => Type): Symbol = - newPolyMethodCon(owner, name, tparam => msym => tcon(tparam)) - - /** tcon receives the type parameter symbol and the method symbol as arguments */ - private def newPolyMethodCon(owner: Symbol, name: TermName, tcon: Symbol => Symbol => Type): Symbol = { - val msym = owner.info.decls enter owner.newMethod(name.encode) - val tparam = newTypeParam(msym, 0) - - msym setInfo polyType(List(tparam), tcon(tparam)(msym)) + + private def specialPolyClass(name: TypeName, flags: Long)(parentFn: Symbol => Type): Symbol = { + val clazz = newClass(ScalaPackageClass, name, Nil) + val tparam = clazz.newSyntheticTypeParam("T0", flags) + val parents = List(AnyRefClass.tpe, parentFn(tparam)) + + clazz setInfo polyType(List(tparam), ClassInfoType(parents, newScope, clazz)) } + + def newPolyMethod(typeParamCount: Int, owner: Symbol, name: TermName, flags: Long)(createFn: PolyMethodCreator): Symbol = { + val msym = owner.newMethod(name.encode, NoPosition, flags) + val tparams = msym.newSyntheticTypeParams(typeParamCount) + val mtpe = createFn(tparams) match { + case (Some(formals), restpe) => MethodType(msym.newSyntheticValueParams(formals), restpe) + case (_, restpe) => NullaryMethodType(restpe) + } - private def newTypeParam(owner: Symbol, index: Int): Symbol = - owner.newTypeParameter(newTypeName("T" + index)) setInfo TypeBounds.empty + msym setInfoAndEnter polyType(tparams, mtpe) + } + + /** T1 means one type parameter. + */ + def newT1NullaryMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = { + newPolyMethod(1, owner, name, flags)(tparams => (None, createFn(tparams.head))) + } + def newT1NoParamsMethod(owner: Symbol, name: TermName, flags: Long)(createFn: Symbol => Type): Symbol = { + newPolyMethod(1, owner, name, flags)(tparams => (Some(Nil), createFn(tparams.head))) + } lazy val boxedClassValues = boxedClass.values.toSet lazy val isUnbox = unboxMethod.values.toSet diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala index aba00088f9..b3069adfb4 100644 --- a/src/compiler/scala/reflect/internal/StdNames.scala +++ b/src/compiler/scala/reflect/internal/StdNames.scala @@ -291,6 +291,7 @@ trait StdNames extends NameManglers { self: SymbolTable => val array_update : NameType = "array_update" val arraycopy: NameType = "arraycopy" val asInstanceOf_ : NameType = "asInstanceOf" + val asInstanceOf_Ob : NameType = "$asInstanceOf" val asTypeConstructor: NameType = "asTypeConstructor" val assert_ : NameType = "assert" val assume_ : NameType = "assume" @@ -336,6 +337,7 @@ trait StdNames extends NameManglers { self: SymbolTable => val isDefinedAt: NameType = "isDefinedAt" val isEmpty: NameType = "isEmpty" val isInstanceOf_ : NameType = "isInstanceOf" + val isInstanceOf_Ob : NameType = "$isInstanceOf" val java: NameType = "java" val lang: NameType = "lang" val length: NameType = "length" diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 9f8476a6fe..e3355345f0 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -251,6 +251,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => def freshName() = { cnt += 1; nme.syntheticParamName(cnt) } mmap(argtypess)(tp => 
newValueParameter(freshName(), focusPos(owner.pos), SYNTHETIC) setInfo tp) } + + def newSyntheticTypeParam(): Symbol = newSyntheticTypeParam("T0", 0L) + def newSyntheticTypeParam(name: String, newFlags: Long): Symbol = newTypeParameter(newTypeName(name), NoPosition, newFlags) setInfo TypeBounds.empty + def newSyntheticTypeParams(num: Int): List[Symbol] = (0 until num).toList map (n => newSyntheticTypeParam("T" + n, 0L)) /** Create a new existential type skolem with this symbol its owner, * based on the given symbol and origin. -- cgit v1.2.3 From d0c5ee4c031a126cee4c552a34cf732716568076 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 28 Jan 2012 15:37:07 -0800 Subject: More method synthesis unification. --- .../scala/reflect/internal/Definitions.scala | 37 +++-- src/compiler/scala/reflect/internal/Symbols.scala | 12 ++ .../scala/tools/nsc/transform/Erasure.scala | 2 +- src/compiler/scala/tools/nsc/transform/Mixin.scala | 2 +- .../tools/nsc/typechecker/MethodSynthesis.scala | 133 ++++++++++++++++ .../tools/nsc/typechecker/SyntheticMethods.scala | 167 ++------------------- 6 files changed, 182 insertions(+), 171 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 19726b694d..8114be20d5 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -421,24 +421,27 @@ trait Definitions extends reflect.api.StandardDefinitions { * information into the toString method. */ def manifestToType(m: OptManifest[_]): Type = m match { - case x: AnyValManifest[_] => - getClassIfDefined("scala." + x).tpe case m: ClassManifest[_] => - val name = m.erasure.getName - if (name endsWith nme.MODULE_SUFFIX_STRING) - getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING).tpe - else { - val sym = getClassIfDefined(name) - val args = m.typeArguments - - if (sym eq NoSymbol) NoType - else if (args.isEmpty) sym.tpe - else appliedType(sym.typeConstructor, args map manifestToType) - } + val sym = manifestToSymbol(m) + val args = m.typeArguments + + if ((sym eq NoSymbol) || args.isEmpty) sym.tpe + else appliedType(sym.typeConstructor, args map manifestToType) case _ => NoType } + def manifestToSymbol(m: ClassManifest[_]): Symbol = m match { + case x: scala.reflect.AnyValManifest[_] => + getMember(ScalaPackageClass, newTypeName("" + x)) + case _ => + val name = m.erasure.getName + if (name endsWith nme.MODULE_SUFFIX_STRING) + getModuleIfDefined(name stripSuffix nme.MODULE_SUFFIX_STRING) + else + getClassIfDefined(name) + } + // The given symbol represents either String.+ or StringAdd.+ def isStringAddition(sym: Symbol) = sym == String_+ || sym == StringAdd_+ def isArrowAssoc(sym: Symbol) = ArrowAssocClass.tpe.decls.toList contains sym @@ -586,6 +589,14 @@ trait Definitions extends reflect.api.StandardDefinitions { case _ => NoType } + /** To avoid unchecked warnings on polymorphic classes, translate + * a Foo[T] into a Foo[_] for use in the pattern matcher. 
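// Concretely (an illustration, not from the patch): for a case class C[T](x: T), the
// synthetic canEqual generated later in this series tests against typeCaseType(C), i.e.
// roughly  def canEqual(that: Any): Boolean = that.isInstanceOf[C[_]]
// so the existential C[_] avoids the unchecked warning a test against C[T] would trigger.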
+ */ + def typeCaseType(clazz: Symbol) = clazz.tpe.normalize match { + case TypeRef(_, sym, args) if args.nonEmpty => newExistentialType(sym.typeParams, clazz.tpe) + case tp => tp + } + def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg)) def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg)) def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg)) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index e3355345f0..b3df2b0498 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1832,6 +1832,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => } } + /** Remove any access boundary and clear flags PROTECTED | PRIVATE. + */ + def makePublic = this setPrivateWithin NoSymbol resetFlag AccessFlags + + /** The first parameter to the first argument list of this method, + * or NoSymbol if inapplicable. + */ + def firstParam = info.params match { + case p :: _ => p + case _ => NoSymbol + } + /** change name by appending $$ * Do the same for any accessed symbols or setters/getters */ diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index b342b95742..fe479a5375 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -797,7 +797,7 @@ abstract class Erasure extends AddInterfaces // && (bridge.paramss.nonEmpty && bridge.paramss.head.nonEmpty && bridge.paramss.head.tail.isEmpty) // does the first argument list has exactly one argument -- for user-defined unapplies we can't be sure && !(atPhase(phase.next)(member.tpe <:< other.tpe))) { // no static guarantees (TODO: is the subtype test ever true?) 
import CODE._ - val typeTest = gen.mkIsInstanceOf(REF(bridge.paramss.head.head), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInapply is true since we're after uncurry + val typeTest = gen.mkIsInstanceOf(REF(bridge.firstParam), member.tpe.params.head.tpe, any = true, wrapInApply = true) // any = true since we're before erasure (?), wrapInapply is true since we're after uncurry // println("unapp type test: "+ typeTest) IF (typeTest) THEN bridgingCall ELSE REF(NoneModule) } else bridgingCall diff --git a/src/compiler/scala/tools/nsc/transform/Mixin.scala b/src/compiler/scala/tools/nsc/transform/Mixin.scala index bd29336703..b3b7596f9a 100644 --- a/src/compiler/scala/tools/nsc/transform/Mixin.scala +++ b/src/compiler/scala/tools/nsc/transform/Mixin.scala @@ -1053,7 +1053,7 @@ abstract class Mixin extends InfoTransform with ast.TreeDSL { else accessedRef match { case Literal(_) => accessedRef case _ => - val init = Assign(accessedRef, Ident(sym.paramss.head.head)) + val init = Assign(accessedRef, Ident(sym.firstParam)) val getter = sym.getter(clazz) if (!needsInitFlag(getter)) init diff --git a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala index c6ca9870c3..0c32ff32c0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala +++ b/src/compiler/scala/tools/nsc/typechecker/MethodSynthesis.scala @@ -17,6 +17,139 @@ trait MethodSynthesis { import global._ import definitions._ + import CODE._ + + object synthesisUtil { + type M[T] = Manifest[T] + type CM[T] = ClassManifest[T] + + def ValOrDefDef(sym: Symbol, body: Tree) = + if (sym.isLazy) ValDef(sym, body) + else DefDef(sym, body) + + def applyTypeInternal(manifests: List[M[_]]): Type = { + val symbols = manifests map manifestToSymbol + val container :: args = symbols + val tparams = container.typeConstructor.typeParams + + // Conservative at present - if manifests were more usable this could do a lot more. 
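// What the helpers defined in this object produce, approximately (illustrative only):
//   applyType[List, Int]                             the type List[Int]
//   applyType[Map, Int, String]                      the type Map[Int,String]
//   newMethodType[(Int, String) => Boolean](owner)   MethodType(List(x$1: Int, x$2: String), Boolean)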
+ require(symbols forall (_ ne NoSymbol), "Must find all manifests: " + symbols) + require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) + require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) + + typeRef(container.typeConstructor.prefix, container, args map (_.tpe)) + } + + def companionType[T](implicit m: M[T]) = + getRequiredModule(m.erasure.getName).tpe + + // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]` + def applyType[CC](implicit m1: M[CC]): Type = + applyTypeInternal(List(m1)) + + def applyType[CC[X1], X1](implicit m1: M[CC[_]], m2: M[X1]): Type = + applyTypeInternal(List(m1, m2)) + + def applyType[CC[X1, X2], X1, X2](implicit m1: M[CC[_,_]], m2: M[X1], m3: M[X2]): Type = + applyTypeInternal(List(m1, m2, m3)) + + def applyType[CC[X1, X2, X3], X1, X2, X3](implicit m1: M[CC[_,_,_]], m2: M[X1], m3: M[X2], m4: M[X3]): Type = + applyTypeInternal(List(m1, m2, m3, m4)) + + def newMethodType[F](owner: Symbol)(implicit m: Manifest[F]): Type = { + val fnSymbol = manifestToSymbol(m) + assert(fnSymbol isSubClass FunctionClass(m.typeArguments.size - 1), (owner, m)) + val symbols = m.typeArguments map (m => manifestToSymbol(m)) + val formals = symbols.init map (_.typeConstructor) + val params = owner newSyntheticValueParams formals + + MethodType(params, symbols.last.typeConstructor) + } + } + import synthesisUtil._ + + class ClassMethodSynthesis(val clazz: Symbol, localTyper: Typer) { + private def isOverride(name: TermName) = + clazzMember(name).alternatives exists (sym => !sym.isDeferred && (sym.owner != clazz)) + + def newMethodFlags(name: TermName) = { + val overrideFlag = if (isOverride(name)) OVERRIDE else 0L + overrideFlag | SYNTHETIC + } + def newMethodFlags(method: Symbol) = { + val overrideFlag = if (isOverride(method.name)) OVERRIDE else 0L + (method.flags | overrideFlag | SYNTHETIC) & ~DEFERRED + } + + private def finishMethod(method: Symbol, f: Symbol => Tree): Tree = + logResult("finishMethod")(localTyper typed ValOrDefDef(method, f(method))) + + private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = { + val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name)) + finishMethod(m setInfoAndEnter info, f) + } + private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = { + val m = clazz.newMethod(name.toTermName, clazz.pos.focus, newMethodFlags(name)) + finishMethod(m setInfoAndEnter infoFn(m), f) + } + private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = { + val m = original.cloneSymbol(clazz, newMethodFlags(original)) setPos clazz.pos.focus + m.name = name + finishMethod(clazz.info.decls enter m, f) + } + + private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = + cloneInternal(original, f, original.name) + + def clazzMember(name: Name) = clazz.info nonPrivateMember name + def typeInClazz(sym: Symbol) = clazz.thisType memberType sym + + /** Function argument takes the newly created method symbol of + * the same type as `name` in clazz, and returns the tree to be + * added to the template. 
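// A sketch of what two of the creation helpers below expand to (approximate renderings,
// not from the patch; nothing here names an actual client of createSwitchMethod):
//   constantMethod(nme.toString_, "C")            ~   def toString(): String = "C"
//   createSwitchMethod(name, 0 to 1, restpe)(f)   ~   def name(n: Int): restpe = n match {
//                                                       case 0 => f(0); case 1 => f(1)
//                                                       case _ => throw new IndexOutOfBoundsException(n.toString)
//                                                     }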
+ */ + def overrideMethod(name: Name)(f: Symbol => Tree): Tree = + overrideMethod(clazzMember(name))(f) + + def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = + cloneInternal(original, sym => f(sym setFlag OVERRIDE)) + + def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree = + cloneInternal(original, f, nameFn(original.name)) + + def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree = + createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType)) + + def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree = + createInternal(name, f, NullaryMethodType(returnType)) + + def createMethod(original: Symbol)(f: Symbol => Tree): Tree = + createInternal(original.name, f, original.info) + + def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree = + createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident))) + + def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = { + createMethod(name, List(IntClass.tpe), returnType) { m => + val arg0 = Ident(m.firstParam) + val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0) + val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default + + Match(arg0, cases) + } + } + + // def foo() = constant + def constantMethod(name: Name, value: Any): Tree = { + val constant = Constant(value) + createMethod(name, Nil, constant.tpe)(_ => Literal(constant)) + } + // def foo = constant + def constantNullary(name: Name, value: Any): Tree = { + val constant = Constant(value) + createMethod(name, constant.tpe)(_ => Literal(constant)) + } + } /** There are two key methods in here. * diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 4e986dc5aa..4ea21b1c44 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -36,158 +36,13 @@ trait SyntheticMethods extends ast.TreeDSL { import definitions._ import CODE._ - private object util { - private type CM[T] = ClassManifest[T] - - def ValOrDefDef(sym: Symbol, body: Tree) = - if (sym.isLazy) ValDef(sym, body) - else DefDef(sym, body) - - /** To avoid unchecked warnings on polymorphic classes. - */ - def clazzTypeToTest(clazz: Symbol) = clazz.tpe.normalize match { - case TypeRef(_, sym, args) if args.nonEmpty => newExistentialType(sym.typeParams, clazz.tpe) - case tp => tp - } - - def makeMethodPublic(method: Symbol): Symbol = ( - method setPrivateWithin NoSymbol resetFlag AccessFlags - ) - - def methodArg(method: Symbol, idx: Int): Tree = Ident(method.paramss.head(idx)) - - private def applyTypeInternal(manifests: List[CM[_]]): Type = { - val symbols = manifests map manifestToSymbol - val container :: args = symbols - val tparams = container.typeConstructor.typeParams - - // Overly conservative at present - if manifests were more usable - // this could do a lot more. 
- require(symbols forall (_ ne NoSymbol), "Must find all manifests: " + symbols) - require(container.owner.isPackageClass, "Container must be a top-level class in a package: " + container) - require(tparams.size == args.size, "Arguments must match type constructor arity: " + tparams + ", " + args) - require(args forall (_.typeConstructor.typeParams.isEmpty), "Arguments must be unparameterized: " + args) - - typeRef(container.typeConstructor.prefix, container, args map (_.tpe)) - } - - def manifestToSymbol(m: CM[_]): Symbol = m match { - case x: scala.reflect.AnyValManifest[_] => getMember(ScalaPackageClass, newTermName("" + x)) - case _ => getClassIfDefined(m.erasure.getName) - } - def companionType[T](implicit m: CM[T]) = - getRequiredModule(m.erasure.getName).tpe - - // Use these like `applyType[List, Int]` or `applyType[Map, Int, String]` - def applyType[M](implicit m1: CM[M]): Type = - applyTypeInternal(List(m1)) - - def applyType[M[X1], X1](implicit m1: CM[M[_]], m2: CM[X1]): Type = - applyTypeInternal(List(m1, m2)) - - def applyType[M[X1, X2], X1, X2](implicit m1: CM[M[_,_]], m2: CM[X1], m3: CM[X2]): Type = - applyTypeInternal(List(m1, m2, m3)) - - def applyType[M[X1, X2, X3], X1, X2, X3](implicit m1: CM[M[_,_,_]], m2: CM[X1], m3: CM[X2], m4: CM[X3]): Type = - applyTypeInternal(List(m1, m2, m3, m4)) - } - import util._ - - class MethodSynthesis(val clazz: Symbol, localTyper: Typer) { - private def isOverride(method: Symbol) = - clazzMember(method.name).alternatives exists (sym => (sym != method) && !sym.isDeferred) - - private def setMethodFlags(method: Symbol): Symbol = { - val overrideFlag = if (isOverride(method)) OVERRIDE else 0L - - method setFlag (overrideFlag | SYNTHETIC) resetFlag DEFERRED - } - - private def finishMethod(method: Symbol, f: Symbol => Tree): Tree = { - setMethodFlags(method) - clazz.info.decls enter method - logResult("finishMethod")(localTyper typed ValOrDefDef(method, f(method))) - } - - private def createInternal(name: Name, f: Symbol => Tree, info: Type): Tree = { - val m = clazz.newMethod(name.toTermName, clazz.pos.focus) - m setInfo info - finishMethod(m, f) - } - private def createInternal(name: Name, f: Symbol => Tree, infoFn: Symbol => Type): Tree = { - val m = clazz.newMethod(name.toTermName, clazz.pos.focus) - m setInfo infoFn(m) - finishMethod(m, f) - } - private def cloneInternal(original: Symbol, f: Symbol => Tree, name: Name): Tree = { - val m = original.cloneSymbol(clazz) setPos clazz.pos.focus - m.name = name - finishMethod(m, f) - } - - private def cloneInternal(original: Symbol, f: Symbol => Tree): Tree = - cloneInternal(original, f, original.name) - - def clazzMember(name: Name) = clazz.info nonPrivateMember name match { - case NoSymbol => log("In " + clazz + ", " + name + " not found: " + clazz.info) ; NoSymbol - case sym => sym - } - def typeInClazz(sym: Symbol) = clazz.thisType memberType sym - - /** Function argument takes the newly created method symbol of - * the same type as `name` in clazz, and returns the tree to be - * added to the template. 
- */ - def overrideMethod(name: Name)(f: Symbol => Tree): Tree = - overrideMethod(clazzMember(name))(f) - - def overrideMethod(original: Symbol)(f: Symbol => Tree): Tree = - cloneInternal(original, sym => f(sym setFlag OVERRIDE)) - - def deriveMethod(original: Symbol, nameFn: Name => Name)(f: Symbol => Tree): Tree = - cloneInternal(original, f, nameFn(original.name)) - - def createMethod(name: Name, paramTypes: List[Type], returnType: Type)(f: Symbol => Tree): Tree = - createInternal(name, f, (m: Symbol) => MethodType(m newSyntheticValueParams paramTypes, returnType)) - - def createMethod(name: Name, returnType: Type)(f: Symbol => Tree): Tree = - createInternal(name, f, NullaryMethodType(returnType)) - - def createMethod(original: Symbol)(f: Symbol => Tree): Tree = - createInternal(original.name, f, original.info) - - def forwardMethod(original: Symbol, newMethod: Symbol)(transformArgs: List[Tree] => List[Tree]): Tree = - createMethod(original)(m => gen.mkMethodCall(newMethod, transformArgs(m.paramss.head map Ident))) - - def createSwitchMethod(name: Name, range: Seq[Int], returnType: Type)(f: Int => Tree) = { - createMethod(name, List(IntClass.tpe), returnType) { m => - val arg0 = methodArg(m, 0) - val default = DEFAULT ==> THROW(IndexOutOfBoundsExceptionClass, arg0) - val cases = range.map(num => CASE(LIT(num)) ==> f(num)).toList :+ default - - Match(arg0, cases) - } - } - - // def foo() = constant - def constantMethod(name: Name, value: Any): Tree = { - val constant = Constant(value) - createMethod(name, Nil, constant.tpe)(_ => Literal(constant)) - } - // def foo = constant - def constantNullary(name: Name, value: Any): Tree = { - val constant = Constant(value) - createMethod(name, constant.tpe)(_ => Literal(constant)) - } - } - /** Add the synthetic methods to case classes. */ def addSyntheticMethods(templ: Template, clazz0: Symbol, context: Context): Template = { if (phase.erasedTypes) return templ - val synthesizer = new MethodSynthesis( + val synthesizer = new ClassMethodSynthesis( clazz0, newTyper( if (reporter.hasErrors) context makeSilent false else context ) ) @@ -212,11 +67,12 @@ trait SyntheticMethods extends ast.TreeDSL { // like Manifests and Arrays which are not robust and infer things // which they shouldn't. val accessorLub = ( - if (opt.experimental) + if (opt.experimental) { global.weakLub(accessors map (_.tpe.finalResultType))._1 match { case RefinedType(parents, decls) if !decls.isEmpty => intersectionType(parents) case tp => tp } + } else AnyClass.tpe ) @@ -258,11 +114,10 @@ trait SyntheticMethods extends ast.TreeDSL { /** The canEqual method for case classes. * def canEqual(that: Any) = that.isInstanceOf[This] */ - def canEqualMethod: Tree = { - createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m => - methodArg(m, 0) IS_OBJ clazzTypeToTest(clazz) - ) - } + def canEqualMethod: Tree = ( + createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m => + Ident(m.firstParam) IS_OBJ typeCaseType(clazz)) + ) /** The equality method for case classes. 
* 0 args: @@ -276,8 +131,8 @@ trait SyntheticMethods extends ast.TreeDSL { * } */ def equalsClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m => - val arg0 = methodArg(m, 0) - val thatTest = gen.mkIsInstanceOf(arg0, clazzTypeToTest(clazz), true, false) + val arg0 = Ident(m.firstParam) + val thatTest = gen.mkIsInstanceOf(arg0, typeCaseType(clazz), true, false) val thatCast = gen.mkCast(arg0, clazz.tpe) def argsBody: Tree = { @@ -331,7 +186,7 @@ trait SyntheticMethods extends ast.TreeDSL { Object_hashCode -> (() => constantMethod(nme.hashCode_, clazz.name.decode.hashCode)), Object_toString -> (() => constantMethod(nme.toString_, clazz.name.decode)) // Not needed, as reference equality is the default. - // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ methodArg(m, 0))) + // Object_equals -> (() => createMethod(Object_equals)(m => This(clazz) ANY_EQ Ident(m.firstParam))) ) /** If you serialize a singleton and then deserialize it twice, @@ -381,7 +236,7 @@ trait SyntheticMethods extends ast.TreeDSL { for (ddef @ DefDef(_, _, _, _, _, _) <- templ.body ; if isRewrite(ddef.symbol)) { val original = ddef.symbol val newAcc = deriveMethod(ddef.symbol, name => context.unit.freshTermName(name + "$")) { newAcc => - makeMethodPublic(newAcc) + newAcc.makePublic newAcc resetFlag (ACCESSOR | PARAMACCESSOR) ddef.rhs.duplicate } -- cgit v1.2.3 From 4ab88fbe3ecc5d84a0dec2d8acfbb1687bdd5bd5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 28 Jan 2012 23:25:31 -0800 Subject: Bonus test case for SI-3999. --- test/files/pos/t3999b.scala | 20 ++++++++++++++++++++ 1 file changed, 20 insertions(+) create mode 100644 test/files/pos/t3999b.scala diff --git a/test/files/pos/t3999b.scala b/test/files/pos/t3999b.scala new file mode 100644 index 0000000000..d3fe108479 --- /dev/null +++ b/test/files/pos/t3999b.scala @@ -0,0 +1,20 @@ +object `package` { + trait Score { def toString : String } + trait Test[+T <: Score] { def apply(s : String) : T } + + case class FT(f : Float) extends Score + implicit object FT extends Test[FT] { def apply(s : String) : FT = new FT(s.toFloat) } + + case class IT(i : Int) extends Score + implicit object IT extends Test[IT] { def apply(s : String) : IT = new IT(s.toInt) } +} + +class TT[+T <: Score](implicit val tb : Test[T]) { + def read(s : String) : T = tb(s) +} + +object Tester { + val tt = new TT[FT] + val r = tt.read("1.0") + r.toString +} \ No newline at end of file -- cgit v1.2.3 From 396c01b2c10fe361e92b060e9d524921bd3849d6 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 29 Jan 2012 15:59:08 +0100 Subject: Test for https://issues.scala-lang.org/browse/SI-5418 --- test/pending/run/t5418.check | 0 test/pending/run/t5418.scala | 14 ++++++++++++++ 2 files changed, 14 insertions(+) create mode 100644 test/pending/run/t5418.check create mode 100644 test/pending/run/t5418.scala diff --git a/test/pending/run/t5418.check b/test/pending/run/t5418.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/t5418.scala b/test/pending/run/t5418.scala new file mode 100644 index 0000000000..065710f15e --- /dev/null +++ b/test/pending/run/t5418.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + new Object().getClass + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = 
toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} \ No newline at end of file -- cgit v1.2.3 From 818afc61dd508d601369e7a881eb0d2b97e07b77 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 29 Jan 2012 13:47:17 -0800 Subject: Test case closes SI-4515. --- test/files/neg/t4515.check | 6 ++++++ test/files/neg/t4515.scala | 41 +++++++++++++++++++++++++++++++++++++++++ 2 files changed, 47 insertions(+) create mode 100644 test/files/neg/t4515.check create mode 100644 test/files/neg/t4515.scala diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check new file mode 100644 index 0000000000..ce5350b35f --- /dev/null +++ b/test/files/neg/t4515.check @@ -0,0 +1,6 @@ +t4515.scala:37: error: type mismatch; + found : _0(in value $anonfun) where type _0(in value $anonfun) + required: (some other)_0(in value $anonfun) where type +(some other)_0(in value $anonfun) + handler.onEvent(target, ctx.getEvent, node, ctx) + ^ +one error found diff --git a/test/files/neg/t4515.scala b/test/files/neg/t4515.scala new file mode 100644 index 0000000000..63049f201d --- /dev/null +++ b/test/files/neg/t4515.scala @@ -0,0 +1,41 @@ +import scala.collection.mutable.HashMap + +object Main { + trait Target { } + + trait PushEventContext[EventType] { + def getEvent: EventType + } + trait PushNode[EventType] { } + trait DerivedPushNode[EventType] extends PushNode[EventType] { } + + trait HandlerBase[EventType] { + def onEvent(target: Target, + event: EventType, + node: PushNode[EventType], + ctx: PushEventContext[EventType]): Unit + } + val handlers = new HashMap[DerivedPushNode[_], HandlerBase[_]] + + object TimerPushService { + private val INSTANCE: TimerPushService = new TimerPushService + def get: TimerPushService = INSTANCE + } + + class TimerPushService { + def add[EventType](node: DerivedPushNode[EventType], + context: PushEventContext[EventType]): Unit = {} + + def pollEvents[EventType](node: DerivedPushNode[EventType]): List[PushEventContext[EventType]] = + Nil + } + + def onTimer(target: Target) { + val pushService = TimerPushService.get + for ((node, handler) <- handlers) { + for (ctx <- pushService.pollEvents(node)) { + handler.onEvent(target, ctx.getEvent, node, ctx) + } + } + } +} \ No newline at end of file -- cgit v1.2.3 From 58679b953933494b5f06d61e82257391bba4091c Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Mon, 30 Jan 2012 10:47:24 +0100 Subject: Closes #4336. Some of the type params might already be instantiated if explicit type application is done. 
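For illustration, a condensed sketch adapted from the new test below (test/files/pos/t4336.scala); the names are taken from that test, only the enclosing object is renamed:

  object Example {
    class NonGeneric
    class Generic[T]
    class Composite {
      def contains(setup: Composite => Unit): Composite = this
    }
    def generic[T](parent: Composite): Generic[T] = new Generic[T]
    def nonGeneric(parent: Composite): NonGeneric = new NonGeneric

    // generic[Int] already instantiates T, so eta-expansion leaves nothing to
    // infer; the typer now runs inference only for the type parameters still
    // recorded in context.undetparams instead of for all of meth.typeParams.
    new Composite().contains(generic[Int]) // should eta-expand to Composite => Generic[Int]
    new Composite().contains(nonGeneric)   // should eta-expand to Composite => NonGeneric
  }
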
Review by @adriaanm --- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 2 +- test/files/pos/t4336.scala | 19 +++++++++++++++++++ 2 files changed, 20 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/t4336.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 770b55d6ab..d3ff331f98 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -793,7 +793,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val tree0 = etaExpand(context.unit, tree) // println("eta "+tree+" ---> "+tree0+":"+tree0.tpe+" undet: "+context.undetparams+ " mode: "+Integer.toHexString(mode)) - if (meth.typeParams.nonEmpty) { + if (context.undetparams.nonEmpty) { // #2624: need to infer type arguments for eta expansion of a polymorphic method // context.undetparams contains clones of meth.typeParams (fresh ones were generated in etaExpand) // need to run typer on tree0, since etaExpansion sets the tpe's of its subtrees to null diff --git a/test/files/pos/t4336.scala b/test/files/pos/t4336.scala new file mode 100644 index 0000000000..e10d001585 --- /dev/null +++ b/test/files/pos/t4336.scala @@ -0,0 +1,19 @@ +object Main { + class NonGeneric {} + class Generic[T] {} + + class Composite { + def contains(setup : Composite => Unit) : Composite = this + } + + def generic[T](parent: Composite): Generic[T] = new Generic[T] + def nonGeneric(parent: Composite): NonGeneric = new NonGeneric + + new Composite().contains( + nonGeneric // should have type Composite => NonGeneric + ) + + new Composite().contains( + generic[Int] // should have type Composite => Generic[Int] + ) +} -- cgit v1.2.3 From 5725b2562070f9c37d5ca51008ddef0457a37121 Mon Sep 17 00:00:00 2001 From: aleksandar Date: Mon, 30 Jan 2012 13:03:23 +0100 Subject: Fixed the benchmarks a bit. 
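
The benchmark classes become top-level objects so they can be launched directly, and the collection size is no longer a constructor argument but is read from the `length` system property. A minimal sketch of the new shape (the property name comes from the diff below; the launch flag and the exact scala.testing.Benchmark API are assumptions, not part of this patch):

  object TreeSetInsertSketch extends testing.Benchmark {
    val length = sys.props("length").toInt   // e.g. pass -Dlength=500000 to the JVM
    def run() {
      var t = collection.immutable.TreeSet.empty[Int]
      var i = 0
      while (i < length) { t += i; i += 1 }
    }
  }
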
--- test/benchmarking/TreeSetInsert.scala | 15 +++++++++------ test/benchmarking/TreeSetIterator.scala | 15 +++++++++------ test/benchmarking/TreeSetRemove.scala | 15 +++++++++------ 3 files changed, 27 insertions(+), 18 deletions(-) diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala index 61b064ae33..9ede8aedc5 100644 --- a/test/benchmarking/TreeSetInsert.scala +++ b/test/benchmarking/TreeSetInsert.scala @@ -3,9 +3,9 @@ object TreeSetInsert { def main(args: Array[String]): Unit = { val n = 500000 - new JavaUtilTS(n).main(args) - new MutableTS(n).main(args) - new ImmutableTS(n).main(args) + JavaUtilTS.main(args) + MutableTS.main(args) + ImmutableTS.main(args) } } @@ -16,7 +16,8 @@ class Dummy(val a: Int) extends math.Ordered[Dummy] { } -class JavaUtilTS(val length: Int) extends testing.Benchmark { +object JavaUtilTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: java.util.TreeSet[Dummy] = null @@ -32,7 +33,8 @@ class JavaUtilTS(val length: Int) extends testing.Benchmark { } } -class MutableTS(val length: Int) extends testing.Benchmark { +object MutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.mutable.TreeSet[Dummy] = null @@ -48,7 +50,8 @@ class MutableTS(val length: Int) extends testing.Benchmark { } } -class ImmutableTS(val length: Int) extends testing.Benchmark { +object ImmutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.immutable.TreeSet[Dummy] = null diff --git a/test/benchmarking/TreeSetIterator.scala b/test/benchmarking/TreeSetIterator.scala index c3b19aa29f..08c20e8b0c 100644 --- a/test/benchmarking/TreeSetIterator.scala +++ b/test/benchmarking/TreeSetIterator.scala @@ -3,9 +3,9 @@ object TreeSetIterator { def main(args: Array[String]): Unit = { val n = 500000 - new JavaUtilTS(n).main(args) - new MutableTS(n).main(args) - new ImmutableTS(n).main(args) + JavaUtilTS.main(args) + MutableTS.main(args) + ImmutableTS.main(args) } } @@ -16,7 +16,8 @@ class Dummy(val a: Int) extends math.Ordered[Dummy] { } -class JavaUtilTS(val length: Int) extends testing.Benchmark { +object JavaUtilTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: java.util.TreeSet[Dummy] = null @@ -33,7 +34,8 @@ class JavaUtilTS(val length: Int) extends testing.Benchmark { } } -class MutableTS(val length: Int) extends testing.Benchmark { +object MutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.mutable.TreeSet[Dummy] = null @@ -49,7 +51,8 @@ class MutableTS(val length: Int) extends testing.Benchmark { } } -class ImmutableTS(val length: Int) extends testing.Benchmark { +object ImmutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.immutable.TreeSet[Dummy] = null diff --git a/test/benchmarking/TreeSetRemove.scala b/test/benchmarking/TreeSetRemove.scala index 68c07ce70a..f84066f336 100644 --- a/test/benchmarking/TreeSetRemove.scala +++ b/test/benchmarking/TreeSetRemove.scala @@ -3,9 +3,9 @@ 
object TreeSetRemove { def main(args: Array[String]): Unit = { val n = 500000 - new JavaUtilTS(n).main(args) - new MutableTS(n).main(args) - new ImmutableTS(n).main(args) + JavaUtilTS.main(args) + MutableTS.main(args) + ImmutableTS.main(args) } } @@ -16,7 +16,8 @@ class Dummy(val a: Int) extends math.Ordered[Dummy] { } -class JavaUtilTS(val length: Int) extends testing.Benchmark { +object JavaUtilTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: java.util.TreeSet[Dummy] = null @@ -33,7 +34,8 @@ class JavaUtilTS(val length: Int) extends testing.Benchmark { } } -class MutableTS(val length: Int) extends testing.Benchmark { +object MutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.mutable.TreeSet[Dummy] = null @@ -49,7 +51,8 @@ class MutableTS(val length: Int) extends testing.Benchmark { } } -class ImmutableTS(val length: Int) extends testing.Benchmark { +object ImmutableTS extends testing.Benchmark { + val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray var t: collection.immutable.TreeSet[Dummy] = null -- cgit v1.2.3 From a70d338afaef9a42bf13334267d07a43ad10c5c0 Mon Sep 17 00:00:00 2001 From: aleksandar Date: Mon, 30 Jan 2012 14:00:42 +0100 Subject: Changed the serialVersionUID in List serialization start marker. --- src/library/scala/collection/immutable/List.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/library/scala/collection/immutable/List.scala b/src/library/scala/collection/immutable/List.scala index f789de9fac..5f3f9b717f 100644 --- a/src/library/scala/collection/immutable/List.scala +++ b/src/library/scala/collection/immutable/List.scala @@ -620,7 +620,7 @@ object List extends SeqFactory[List] { } /** Only used for list serialization */ -@SerialVersionUID(0L - 8476791151975527571L) +@SerialVersionUID(0L - 8287891243975527522L) private[scala] case object ListSerializeStart /** Only used for list serialization */ -- cgit v1.2.3 From a4d6992280b179295142f2f7c7d138ec7f08039b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 16 Dec 2011 14:20:36 -0800 Subject: A couple power mod conveniences. And disambiguations. And renamed all kinds of methods to something less inscrutable. Moved all the instance-to-compiler-structure implicit behind a wrapper to avoid accidental conversions. The wrapper has a single method, ?, which you can experience like so: // Sorry tab completion doesn't yet figure out implicit conversions scala> val x = Map(1 -> 2).? x: $r.power.InternalInfo[scala.collection.immutable.Map[Int,Int]] = Map(1 -> 2) (Map) scala> x. asInstanceOf baseTypeSeq baseTypeSeqMap baseTypeWhichDefines companion decls declsOriginal declsOverride erasure fullManifest glb info isInstanceOf lub memberMethods memberTypes members membersDeclared membersInherited membersUnabridged moduleClass name owner owners pkg pkgClass pkgClassSymbols pkgClasses pkgMembers pkgName pkgSymbols shortClass signature symbol toString tpe // This uses an implicit to install sigs (and another for '>' which is // the generic printing function) but transparency, one step at a time. 
scala> Map(1 -> 2).?.membersDeclared.sigs > def $init$(): Unit override def empty: scala.collection.immutable.Map[A,B] override def toMap[T,U](implicit ev: <:<[(A, B),(T, U)]): scala.collection.immutable.Map[T,U] override def seq: scala.collection.immutable.Map[A,B] def withDefault[B1 >: B](d: A => B1): scala.collection.immutable.Map[A,B1] def withDefaultValue[B1 >: B](d: B1): scala.collection.immutable.Map[A,B1] override def updated[B1 >: B](key: A,value: B1): scala.collection.immutable.Map[A,B1] def +[B1 >: B](kv: (A, B1)): scala.collection.immutable.Map[A,B1] --- .../scala/tools/nsc/interpreter/IMain.scala | 3 + .../scala/tools/nsc/interpreter/Power.scala | 144 +++++++++++++-------- 2 files changed, 91 insertions(+), 56 deletions(-) diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 8cdd2334ab..567d6c2f78 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -1125,6 +1125,9 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val termname = newTypeName(name) findName(termname) getOrElse getModuleIfDefined(termname) } + def types[T: ClassManifest] : Symbol = types(classManifest[T].erasure.getName) + def terms[T: ClassManifest] : Symbol = terms(classManifest[T].erasure.getName) + def apply[T: ClassManifest] : Symbol = apply(classManifest[T].erasure.getName) /** the previous requests this interpreter has processed */ private lazy val prevRequests = mutable.ListBuffer[Request]() diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index b4a9b9b0e3..2ec41506ab 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -20,7 +20,7 @@ import io.{ Path } class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: ReplValsImpl) { import intp.{ beQuietDuring, typeOfExpression, interpret, parse } import intp.global._ - import definitions.{ manifestToType, getClassIfDefined, getModuleIfDefined } + import definitions.{ manifestToType, manifestToSymbol, getClassIfDefined, getModuleIfDefined } abstract class SymSlurper { def isKeep(sym: Symbol): Boolean @@ -65,10 +65,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl } } - class PackageSlurper(pkgName: String) extends SymSlurper { - val pkgSymbol = getModuleIfDefined(pkgName) - val modClass = pkgSymbol.moduleClass - + class PackageSlurper(packageClass: Symbol) extends SymSlurper { /** Looking for dwindling returns */ def droppedEnough() = unseenHistory.size >= 4 && { unseenHistory takeRight 4 sliding 2 forall { it => @@ -79,9 +76,16 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl def isRecur(sym: Symbol) = true def isIgnore(sym: Symbol) = sym.isAnonOrRefinementClass || (sym.name.toString contains "$mc") - def isKeep(sym: Symbol) = sym.hasTransOwner(modClass) + def isKeep(sym: Symbol) = sym.hasTransOwner(packageClass) def isFinished() = droppedEnough() - def slurp() = apply(modClass) + def slurp() = { + if (packageClass.isPackageClass) + apply(packageClass) + else { + repldbg("Not a package class! 
" + packageClass) + Set() + } + } } private def customBanner = replProps.powerBanner.option flatMap (f => io.File(f).safeSlurp()) @@ -124,7 +128,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl def to_str(m: Symbol) = "%12s %s".format( m.decodedName, "" + elimRefinement(m.accessedOrSelf.tpe) stripPrefix "scala.tools.nsc.") - ( rutil.info[ReplValsImpl].declares + ( rutil.info[ReplValsImpl].membersDeclared filter (m => m.isPublic && !m.hasModuleFlag && !m.isConstructor) sortBy (_.decodedName) map to_str @@ -136,59 +140,78 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl implicit def apply[T: Manifest] : InternalInfo[T] = new InternalInfo[T](None) } object InternalInfo extends LowPriorityInternalInfo { } + + /** Now dealing with the problem of acidentally calling a method on Type + * when you're holding a Symbol and seeing the Symbol converted to the + * type of Symbol rather than the type of the thing represented by the + * symbol, by only implicitly installing one method, "?", and the rest + * of the conveniences exist on that wrapper. + */ + trait LowPriorityInternalInfoWrapper { + implicit def apply[T: Manifest] : InternalInfoWrapper[T] = new InternalInfoWrapper[T](None) + } + object InternalInfoWrapper extends LowPriorityInternalInfoWrapper { + + } + class InternalInfoWrapper[T: Manifest](value: Option[T] = None) { + def ? : InternalInfo[T] = new InternalInfo[T](value) + } /** Todos... * translate manifest type arguments into applied types * customizable symbol filter (had to hardcode no-spec to reduce noise) */ class InternalInfo[T: Manifest](value: Option[T] = None) { - // Decided it was unwise to have implicit conversions via commonly - // used type/symbol methods, because it's too easy to e.g. call - // "x.tpe" where x is a Type, and rather than failing you get the - // Type representing Types#Type (or Manifest, or whatever.) - private def tpe = tpe_ - private def symbol = symbol_ - private def name = name_ - - def symbol_ : Symbol = getClassIfDefined(erasure.getName) - def tpe_ : Type = manifestToType(man) - def name_ : Name = symbol.name - def companion = symbol.companionSymbol - def info = symbol.info - def module = symbol.moduleClass - def owner = symbol.owner - def owners = symbol.ownerChain drop 1 - def defn = symbol.defString - def decls = symbol.info.decls - - def declares = decls.toList - def inherits = members filterNot (declares contains _) - def types = members filter (_.name.isTypeName) - def methods = members filter (_.isMethod) - def overrides = declares filter (_.isOverride) - def inPackage = owners find (x => x.isPackageClass || x.isPackage) getOrElse definitions.RootPackage - - def man = manifest[T] - def erasure = man.erasure - def members = tpe.members filterNot (_.name.toString contains "$mc") - def allMembers = tpe.members - def bts = info.baseTypeSeq.toList - def btsmap = bts map (x => (x, x.decls.toList)) toMap - def pkgName = Option(erasure.getPackage) map (_.getName) - def pkg = pkgName map getModuleIfDefined getOrElse NoSymbol - def pkgmates = pkg.tpe.members - def pkgslurp = pkgName match { - case Some(name) => new PackageSlurper(name) slurp() - case _ => Set() - } - def ? 
= this - - def whoHas(name: String) = bts filter (_.decls exists (_.name.toString == name)) - def <:<[U: Manifest](other: U) = tpe <:< InternalInfo[U].tpe - def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, InternalInfo[U].tpe)) - def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, InternalInfo[U].tpe)) + private def newInfo[U: Manifest](value: U): InternalInfo[U] = new InternalInfo[U](Some(value)) + private def isSpecialized(s: Symbol) = s.name.toString contains "$mc" + private def isImplClass(s: Symbol) = s.name.toString endsWith "$class" + + /** Standard noise reduction filter. */ + def excludeMember(s: Symbol) = ( + isSpecialized(s) + || isImplClass(s) + || s.isAnonOrRefinementClass + || s.isAnonymousFunction + ) + def symbol = manifestToSymbol(fullManifest) + def tpe = manifestToType(fullManifest) + def name = symbol.name + def companion = symbol.companionSymbol + def info = symbol.info + def moduleClass = symbol.moduleClass + def owner = symbol.owner + def owners = symbol.ownerChain drop 1 + def signature = symbol.defString + + def decls = info.decls + def declsOverride = membersDeclared filter (_.isOverride) + def declsOriginal = membersDeclared filterNot (_.isOverride) + + def members = membersUnabridged filterNot excludeMember + def membersUnabridged = tpe.members + def membersDeclared = members filterNot excludeMember + def membersInherited = members filterNot (membersDeclared contains _) + def memberTypes = members filter (_.name.isTypeName) + def memberMethods = members filter (_.isMethod) + + def pkg = symbol.enclosingPackage + def pkgName = pkg.fullName + def pkgClass = symbol.enclosingPackageClass + def pkgMembers = pkg.info.members filterNot excludeMember + def pkgClasses = pkgMembers filter (s => s.isClass && s.isDefinedInPackage) + def pkgSymbols = new PackageSlurper(pkgClass).slurp() filterNot excludeMember + + def fullManifest = manifest[T] + def erasure = fullManifest.erasure + def shortClass = erasure.getName split "[$.]" last + def baseTypeSeq = tpe.baseTypeSeq.toList + def baseTypeSeqMap = baseTypeSeq map (x => (x, x.decls.toList)) toMap + + def baseTypeWhichDefines(name: String) = baseTypeSeq filter (_.decls exists (_.name.toString == name)) + def <:<[U: Manifest](other: U) = tpe <:< newInfo(other).tpe + def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) + def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) - def shortClass = erasure.getName split "[$.]" last override def toString = value match { case Some(x) => "%s (%s)".format(x, shortClass) case _ => erasure.getName @@ -288,11 +311,17 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl def slurp(): String = io.Streamable.slurp(url) def pp() { intp prettyPrint slurp() } } - + class RichSymbolList(syms: List[Symbol]) { + def sigs = syms map (_.defString) + def infos = syms map (_.info) + } + trait Implicits1 { // fallback implicit def replPrinting[T](x: T)(implicit pretty: Prettifier[T] = Prettifier.default[T]) = new SinglePrettifierClass[T](x) + + implicit def liftToTypeName(s: String): TypeName = newTypeName(s) } trait Implicits2 extends Implicits1 { class RichSymbol(sym: Symbol) { @@ -309,7 +338,7 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl implicit lazy val powerSymbolOrdering: Ordering[Symbol] = Ordering[Name] on (_.name) implicit lazy val powerTypeOrdering: Ordering[Type] = Ordering[Symbol] on (_.typeSymbol) - implicit def replInternalInfo[T: Manifest](x: T): 
InternalInfo[T] = new InternalInfo[T](Some(x)) + implicit def replInternalInfo[T: Manifest](x: T): InternalInfoWrapper[T] = new InternalInfoWrapper[T](Some(x)) implicit def replEnhancedStrings(s: String): RichReplString = new RichReplString(s) implicit def replMultiPrinting[T: Prettifier](xs: TraversableOnce[T]): MultiPrettifierClass[T] = new MultiPrettifierClass[T](xs.toSeq) @@ -318,6 +347,9 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl implicit def replInputStream(in: InputStream)(implicit codec: Codec) = new RichInputStream(in) implicit def replEnhancedURLs(url: URL)(implicit codec: Codec): RichReplURL = new RichReplURL(url)(codec) + + implicit def liftToTermName(s: String): TermName = newTermName(s) + implicit def replListOfSymbols(xs: List[Symbol]) = new RichSymbolList(xs) } trait ReplUtilities { -- cgit v1.2.3 From 366fae9741283efc08edb32378f56a08417ff35a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 30 Jan 2012 10:34:14 -0800 Subject: Print compound types legibly. This one's about a million years overdue. Try this on for size, from the command line: printf ":power\nList(1).?.baseClasses.sigs >\n" | scala Also, a little more power mode refinement. --- src/compiler/scala/reflect/internal/Types.scala | 33 +++++++++++++++------- .../scala/tools/nsc/interpreter/Power.scala | 21 ++++++++------ 2 files changed, 35 insertions(+), 19 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index fab10f7896..371fb8d585 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1409,7 +1409,7 @@ trait Types extends api.Types { self: SymbolTable => // override def isNullable: Boolean = // parents forall (p => p.isNullable && !p.typeSymbol.isAbstractType); - + override def safeToString: String = parents.mkString(" with ") + (if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) @@ -1750,6 +1750,19 @@ trait Types extends api.Types { self: SymbolTable => // override def isNonNull: Boolean = symbol == NonNullClass || super.isNonNull; override def kind = "ClassInfoType" + + override def safeToString = + if (settings.debug.value || decls.size > 1) + formattedToString + else + super.safeToString + + /** A nicely formatted string with newlines and such. 
+ */ + def formattedToString: String = + parents.mkString("\n with ") + + (if (settings.debug.value || parents.isEmpty || (decls.elems ne null)) + decls.mkString(" {\n ", "\n ", "\n}") else "") } object ClassInfoType extends ClassInfoTypeExtractor @@ -2479,7 +2492,7 @@ trait Types extends api.Types { self: SymbolTable => */ case class AntiPolyType(pre: Type, targs: List[Type]) extends Type { override def safeToString = - pre.toString + targs.mkString("(with type arguments ", ",", ")"); + pre.toString + targs.mkString("(with type arguments ", ", ", ")"); override def memberType(sym: Symbol) = appliedType(pre.memberType(sym), targs) // override def memberType(sym: Symbol) = pre.memberType(sym) match { // case PolyType(tparams, restp) => @@ -3521,14 +3534,14 @@ trait Types extends api.Types { self: SymbolTable => } override def toString = { - val boundsStr = ( - if (loBounds.isEmpty && hiBounds.isEmpty) "[]" - else { - val lostr = if (loBounds.isEmpty) "" else loBounds map (_.safeToString) mkString("_>:(", ", ", ")") - val histr = if (hiBounds.isEmpty) "" else hiBounds map (_.safeToString) mkString("_<:(", ", ", ")") - List(lostr, histr) filterNot (_ == "") mkString ("[", " | ", "]") - } - ) + val boundsStr = { + val lo = loBounds filterNot (_.typeSymbolDirect eq NothingClass) + val hi = hiBounds filterNot (_.typeSymbolDirect eq AnyClass) + val lostr = if (lo.isEmpty) Nil else List(lo.mkString(" >: (", ", ", ")")) + val histr = if (hi.isEmpty) Nil else List(hi.mkString(" <: (", ", ", ")")) + + lostr ++ histr mkString ("[", " | ", "]") + } if (inst eq NoType) boundsStr else boundsStr + " _= " + inst.safeToString } diff --git a/src/compiler/scala/tools/nsc/interpreter/Power.scala b/src/compiler/scala/tools/nsc/interpreter/Power.scala index 2ec41506ab..835fbb5638 100644 --- a/src/compiler/scala/tools/nsc/interpreter/Power.scala +++ b/src/compiler/scala/tools/nsc/interpreter/Power.scala @@ -187,12 +187,12 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl def declsOverride = membersDeclared filter (_.isOverride) def declsOriginal = membersDeclared filterNot (_.isOverride) - def members = membersUnabridged filterNot excludeMember - def membersUnabridged = tpe.members - def membersDeclared = members filterNot excludeMember - def membersInherited = members filterNot (membersDeclared contains _) - def memberTypes = members filter (_.name.isTypeName) - def memberMethods = members filter (_.isMethod) + def members = membersUnabridged filterNot excludeMember + def membersUnabridged = tpe.members + def membersDeclared = members filterNot excludeMember + def membersInherited = members filterNot (membersDeclared contains _) + def memberTypes = members filter (_.name.isTypeName) + def memberMethods = members filter (_.isMethod) def pkg = symbol.enclosingPackage def pkgName = pkg.fullName @@ -204,10 +204,13 @@ class Power[ReplValsImpl <: ReplVals : Manifest](val intp: IMain, replVals: Repl def fullManifest = manifest[T] def erasure = fullManifest.erasure def shortClass = erasure.getName split "[$.]" last - def baseTypeSeq = tpe.baseTypeSeq.toList - def baseTypeSeqMap = baseTypeSeq map (x => (x, x.decls.toList)) toMap - def baseTypeWhichDefines(name: String) = baseTypeSeq filter (_.decls exists (_.name.toString == name)) + def baseClasses = tpe.baseClasses + def baseClassDecls = baseClasses map (x => (x, x.info.decls.toList.sortBy(_.name.toString))) toMap + def ancestors = baseClasses drop 1 + def ancestorDeclares(name: String) = ancestors filter (_.info member 
newTermName(name) ne NoSymbol) + def baseTypes = tpe.baseTypeSeq.toList + def <:<[U: Manifest](other: U) = tpe <:< newInfo(other).tpe def lub[U: Manifest](other: U) = intp.global.lub(List(tpe, newInfo(other).tpe)) def glb[U: Manifest](other: U) = intp.global.glb(List(tpe, newInfo(other).tpe)) -- cgit v1.2.3 From 263aa2ead1f599f48b218027eb9550745fef43f1 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 30 Jan 2012 13:40:54 -0800 Subject: Have ant notice starr out of date. If your compiler .desired.sha1 is newer than the jar, it will go download it. --- build.xml | 15 +++++++++++++-- 1 file changed, 13 insertions(+), 2 deletions(-) diff --git a/build.xml b/build.xml index 39e47471c6..0cb728c35f 100644 --- a/build.xml +++ b/build.xml @@ -213,11 +213,22 @@ INITIALISATION + + + + + - - + + + + + -- cgit v1.2.3 From 2e664079445549288789ad24a95ce7d583ae205c Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 31 Jan 2012 11:30:41 +0100 Subject: Introduce getAnnotations that triggers symbol completion Default getter for annotations doesn't perform initialization, hence we've faced the following bug: https://issues.scala-lang.org/browse/SI-5423. One of the approaches to fixing it would be to auto-complete on getter, but according to Martin we'd better not do that because of cycles. That's why I'm just introducing a new, eager, variation of `annotations' and redirecting public API to it. Review by @odersky. --- src/compiler/scala/reflect/internal/Symbols.scala | 10 ++++++++++ src/library/scala/reflect/api/Symbols.scala | 2 +- test/files/run/t5423.check | 1 + test/files/run/t5423.scala | 12 ++++++++++++ 4 files changed, 24 insertions(+), 1 deletion(-) create mode 100644 test/files/run/t5423.check create mode 100644 test/files/run/t5423.scala diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 94d764067f..e777491300 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1272,6 +1272,16 @@ trait Symbols extends api.Symbols { self: SymbolTable => * the annotations attached to member a definition (class, method, type, field). */ def annotations: List[AnnotationInfo] = _annotations + + /** This getter is necessary for reflection, see https://issues.scala-lang.org/browse/SI-5423 + * We could auto-inject completion into `annotations' and `setAnnotations', but I'm not sure about that + * @odersky writes: I fear we can't do the forcing for all compiler symbols as that could introduce cycles + */ + def getAnnotations: List[AnnotationInfo] = { + initialize + _annotations + } + def setAnnotations(annots: List[AnnotationInfo]): this.type = { _annotations = annots this diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala index 01c1a0f2ae..17d9b06324 100755 --- a/src/library/scala/reflect/api/Symbols.scala +++ b/src/library/scala/reflect/api/Symbols.scala @@ -79,7 +79,7 @@ trait Symbols { self: Universe => /** A list of annotations attached to this Symbol. */ - def annotations: List[self.AnnotationInfo] + def getAnnotations: List[self.AnnotationInfo] /** For a class: the module or case class factory with the same name in the same package. 
* For all others: NoSymbol diff --git a/test/files/run/t5423.check b/test/files/run/t5423.check new file mode 100644 index 0000000000..ae3d3fb82b --- /dev/null +++ b/test/files/run/t5423.check @@ -0,0 +1 @@ +List(table) \ No newline at end of file diff --git a/test/files/run/t5423.scala b/test/files/run/t5423.scala new file mode 100644 index 0000000000..2139773ff1 --- /dev/null +++ b/test/files/run/t5423.scala @@ -0,0 +1,12 @@ +import java.lang.Class +import scala.reflect.mirror._ +import scala.reflect.runtime.Mirror.ToolBox +import scala.reflect.Code + +final class table extends StaticAnnotation +@table class A + +object Test extends App{ + val s = classToSymbol(classOf[A]) + println(s.getAnnotations) +} -- cgit v1.2.3 From 147e9eaf3814738f339b020e701a160ba2f68b60 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 31 Jan 2012 08:21:34 -0800 Subject: Improved warning for insensible comparisons. Utilize knowledge of case class synthetic equals to rule out some comparisons statically. Closes SI-5426. --- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 11 +++++++---- test/files/neg/checksensible.check | 5 ++++- test/files/neg/t5426.check | 13 +++++++++++++ test/files/neg/t5426.flags | 1 + test/files/neg/t5426.scala | 10 ++++++++++ 5 files changed, 35 insertions(+), 5 deletions(-) create mode 100644 test/files/neg/t5426.check create mode 100644 test/files/neg/t5426.flags create mode 100644 test/files/neg/t5426.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a99d09173e..a6c2f75d5e 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1034,10 +1034,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R /** Symbols which limit the warnings we can issue since they may be value types */ val isMaybeValue = Set(AnyClass, AnyRefClass, AnyValClass, ObjectClass, ComparableClass, JavaSerializableClass) - // Whether def equals(other: Any) is overridden - def isUsingDefaultEquals = { + // Whether def equals(other: Any) is overridden or synthetic + def isUsingWarnableEquals = { val m = receiver.info.member(nme.equals_) - (m == Object_equals) || (m == Any_equals) + (m == Object_equals) || (m == Any_equals) || (m.isSynthetic && m.owner.isCase) } // Whether this == or != is one of those defined in Any/AnyRef or an overload from elsewhere. def isUsingDefaultScalaOp = { @@ -1045,7 +1045,10 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R (s == Object_==) || (s == Object_!=) || (s == Any_==) || (s == Any_!=) } // Whether the operands+operator represent a warnable combo (assuming anyrefs) - def isWarnable = isReferenceOp || (isUsingDefaultEquals && isUsingDefaultScalaOp) + // Looking for comparisons performed with ==/!= in combination with either an + // equals method inherited from Object or a case class synthetic equals (for + // which we know the logic.) 
+ def isWarnable = isReferenceOp || (isUsingDefaultScalaOp && isUsingWarnableEquals) def isEitherNullable = (NullClass.tpe <:< receiver.info) || (NullClass.tpe <:< actual.info) def isBoolean(s: Symbol) = unboxedValueClass(s) == BooleanClass def isUnit(s: Symbol) = unboxedValueClass(s) == UnitClass diff --git a/test/files/neg/checksensible.check b/test/files/neg/checksensible.check index 0881205bb4..d785179a56 100644 --- a/test/files/neg/checksensible.check +++ b/test/files/neg/checksensible.check @@ -28,6 +28,9 @@ checksensible.scala:27: error: comparing values of types Int and Unit using `==' checksensible.scala:29: error: comparing values of types Int and String using `==' will always yield false 1 == "abc" ^ +checksensible.scala:33: error: comparing values of types Some[Int] and Int using `==' will always yield false + Some(1) == 1 // as above + ^ checksensible.scala:38: error: comparing a fresh object using `==' will always yield false new AnyRef == 1 ^ @@ -94,4 +97,4 @@ checksensible.scala:84: error: comparing values of types EqEqRefTest.this.C3 and checksensible.scala:95: error: comparing values of types Unit and Int using `!=' will always yield true while ((c = in.read) != -1) ^ -32 errors found +33 errors found diff --git a/test/files/neg/t5426.check b/test/files/neg/t5426.check new file mode 100644 index 0000000000..d9e192d3f0 --- /dev/null +++ b/test/files/neg/t5426.check @@ -0,0 +1,13 @@ +t5426.scala:2: error: comparing values of types Some[Int] and Int using `==' will always yield false + def f1 = Some(5) == 5 + ^ +t5426.scala:3: error: comparing values of types Int and Some[Int] using `==' will always yield false + def f2 = 5 == Some(5) + ^ +t5426.scala:8: error: comparing values of types Int and Some[Int] using `==' will always yield false + (x1 == x2) + ^ +t5426.scala:9: error: comparing values of types Some[Int] and Int using `==' will always yield false + (x2 == x1) + ^ +four errors found diff --git a/test/files/neg/t5426.flags b/test/files/neg/t5426.flags new file mode 100644 index 0000000000..e8fb65d50c --- /dev/null +++ b/test/files/neg/t5426.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/t5426.scala b/test/files/neg/t5426.scala new file mode 100644 index 0000000000..f2fb5cc12c --- /dev/null +++ b/test/files/neg/t5426.scala @@ -0,0 +1,10 @@ +class A { + def f1 = Some(5) == 5 + def f2 = 5 == Some(5) + + val x1 = 5 + val x2 = Some(5) + + (x1 == x2) + (x2 == x1) +} -- cgit v1.2.3 From 37bcff7956451cd74d08899e0e49c8b569d3a882 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 31 Jan 2012 09:03:32 -0800 Subject: Test case closes SI-5352. 
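
The new neg test pins down structural type conformance for non-public members: a value whose static type declares `f()` as protected does not conform to the structural type `{ def f(): Int }`, and a direct call to such an `f` is rejected. Condensed from the test below (names follow t5352.scala; the snippet is expected to fail compilation, which is the point of the test):

  object boop {
    abstract class Bar { protected def f(): Any }
    class Bar1 extends Bar { protected def f(): Int = 5 }

    type BarF = { def f(): Int }
    val x: BarF = new Bar1   // error: f() is protected, so Bar1 does not conform to BarF
    (new Bar1).f             // error: protected method f cannot be accessed from here
  }
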
--- test/files/neg/t5352.check | 13 +++++++++++++ test/files/neg/t5352.flags | 1 + test/files/neg/t5352.scala | 15 +++++++++++++++ 3 files changed, 29 insertions(+) create mode 100644 test/files/neg/t5352.check create mode 100644 test/files/neg/t5352.flags create mode 100644 test/files/neg/t5352.scala diff --git a/test/files/neg/t5352.check b/test/files/neg/t5352.check new file mode 100644 index 0000000000..d24b0e8ee1 --- /dev/null +++ b/test/files/neg/t5352.check @@ -0,0 +1,13 @@ +t5352.scala:11: error: type mismatch; + found : boop.Bar + required: boop.BarF + (which expands to) AnyRef{def f(): Int} + x = xs.head + ^ +t5352.scala:14: error: method f in class Bar1 cannot be accessed in boop.Bar1 + Access to protected method f not permitted because + enclosing object boop is not a subclass of + class Bar1 in object boop where target is defined + (new Bar1).f + ^ +two errors found diff --git a/test/files/neg/t5352.flags b/test/files/neg/t5352.flags new file mode 100644 index 0000000000..e8fb65d50c --- /dev/null +++ b/test/files/neg/t5352.flags @@ -0,0 +1 @@ +-Xfatal-warnings \ No newline at end of file diff --git a/test/files/neg/t5352.scala b/test/files/neg/t5352.scala new file mode 100644 index 0000000000..6ee41f5680 --- /dev/null +++ b/test/files/neg/t5352.scala @@ -0,0 +1,15 @@ +object boop { + abstract class Bar { protected def f(): Any } + class Bar1 extends Bar { protected def f(): Int = 5 } + class Bar2 extends Bar { protected def f(): Int = 5 } + + val xs = List(new Bar1, new Bar2) + + type BarF = { def f(): Int } + + var x: BarF = _ + x = xs.head + x.f + + (new Bar1).f +} -- cgit v1.2.3 From 41ce53e3e643e85a022393e54c952ec4dee2cb57 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 31 Jan 2012 19:23:00 +0100 Subject: Tests for https://issues.scala-lang.org/browse/SI-5427 --- test/pending/run/t5427a.check | 1 + test/pending/run/t5427a.scala | 10 ++++++++++ test/pending/run/t5427b.check | 1 + test/pending/run/t5427b.scala | 11 +++++++++++ test/pending/run/t5427c.check | 1 + test/pending/run/t5427c.scala | 13 +++++++++++++ test/pending/run/t5427d.check | 1 + test/pending/run/t5427d.scala | 11 +++++++++++ 8 files changed, 49 insertions(+) create mode 100644 test/pending/run/t5427a.check create mode 100644 test/pending/run/t5427a.scala create mode 100644 test/pending/run/t5427b.check create mode 100644 test/pending/run/t5427b.scala create mode 100644 test/pending/run/t5427c.check create mode 100644 test/pending/run/t5427c.scala create mode 100644 test/pending/run/t5427d.check create mode 100644 test/pending/run/t5427d.scala diff --git a/test/pending/run/t5427a.check b/test/pending/run/t5427a.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/pending/run/t5427a.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/pending/run/t5427a.scala b/test/pending/run/t5427a.scala new file mode 100644 index 0000000000..27b28da0ac --- /dev/null +++ b/test/pending/run/t5427a.scala @@ -0,0 +1,10 @@ +import scala.reflect.mirror._ + +object Foo { val bar = 2 } + +object Test extends App { + val tpe = getType(Foo) + val bar = tpe.nonPrivateMember(newTermName("bar")) + val value = getValue(Foo, bar) + println(value) +} \ No newline at end of file diff --git a/test/pending/run/t5427b.check b/test/pending/run/t5427b.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/pending/run/t5427b.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/pending/run/t5427b.scala b/test/pending/run/t5427b.scala new file mode 
100644 index 0000000000..7a92b6ebbe --- /dev/null +++ b/test/pending/run/t5427b.scala @@ -0,0 +1,11 @@ +import scala.reflect.mirror._ + +class Foo { val bar = 2 } + +object Test extends App { + val foo = new Foo + val tpe = getType(foo) + val bar = tpe.nonPrivateMember(newTermName("bar")) + val value = getValue(foo, bar) + println(value) +} \ No newline at end of file diff --git a/test/pending/run/t5427c.check b/test/pending/run/t5427c.check new file mode 100644 index 0000000000..32c91abbd6 --- /dev/null +++ b/test/pending/run/t5427c.check @@ -0,0 +1 @@ +no public member \ No newline at end of file diff --git a/test/pending/run/t5427c.scala b/test/pending/run/t5427c.scala new file mode 100644 index 0000000000..ab41d8b8cd --- /dev/null +++ b/test/pending/run/t5427c.scala @@ -0,0 +1,13 @@ +import scala.reflect.mirror._ + +class Foo(bar: Int) + +object Test extends App { + val foo = new Foo(2) + val tpe = getType(foo) + val bar = tpe.nonPrivateMember(newTermName("bar")) + bar match { + case NoSymbol => println("no public member") + case _ => println("i'm screwed") + } +} \ No newline at end of file diff --git a/test/pending/run/t5427d.check b/test/pending/run/t5427d.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/pending/run/t5427d.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/pending/run/t5427d.scala b/test/pending/run/t5427d.scala new file mode 100644 index 0000000000..fd4c62e876 --- /dev/null +++ b/test/pending/run/t5427d.scala @@ -0,0 +1,11 @@ +import scala.reflect.mirror._ + +class Foo(val bar: Int) + +object Test extends App { + val foo = new Foo(2) + val tpe = getType(foo) + val bar = tpe.nonPrivateMember(newTermName("bar")) + val value = getValue(foo, bar) + println(value) +} \ No newline at end of file -- cgit v1.2.3 From 18d6a9f5054bf7f1c76e48c3eb8da0fb96ccdccb Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 1 Feb 2012 15:16:14 +0100 Subject: Fixed handling of empty keys in emitSWITCH. The problem of emitSWITCH not handling empty keys popped up when I tried to implement unfolding of pattern alternatives in genicode instead of in typers/explicitouter. This change makes perfect sense in isolation as bytecode allows LOOKUPSWITCHes that have only default case. I actually verified that this kind of bytecode is generated by javac when one has switch statement with only default case defined. Review by @paulp or @dragos. --- lib/fjbg.jar.desired.sha1 | 2 +- src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java | 10 ++++++++++ 2 files changed, 11 insertions(+), 1 deletion(-) diff --git a/lib/fjbg.jar.desired.sha1 b/lib/fjbg.jar.desired.sha1 index 1b1068b0d3..d24a5d01fc 100644 --- a/lib/fjbg.jar.desired.sha1 +++ b/lib/fjbg.jar.desired.sha1 @@ -1 +1 @@ -9aa9c99b8032e454f1f85d27de31a88b3dec1045 ?fjbg.jar +c3f9b576c91cb9761932ad936ccc4a71f33d2ef2 ?fjbg.jar diff --git a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java index 8b0338ed29..d4c5417260 100644 --- a/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java +++ b/src/fjbg/ch/epfl/lamp/fjbg/JExtendedCode.java @@ -596,6 +596,16 @@ public class JExtendedCode extends JCode { double minDensity) { assert keys.length == branches.length; + //The special case for empty keys. It makes sense to allow + //empty keys and generate LOOKUPSWITCH with defaultBranch + //only. This is exactly what javac does for switch statement + //that has only a default case. 
+ if (keys.length == 0) { + emitLOOKUPSWITCH(keys, branches, defaultBranch); + return; + } + //the rest of the code assumes that keys.length > 0 + // sorting the tables // FIXME use quicksort for (int i = 1; i < keys.length; i++) { -- cgit v1.2.3 From e311585e26449a921bc8a40b87b2552f1d363086 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 25 Jan 2012 18:38:38 +0100 Subject: Unfold pattern alternatives in genicode. Implemented unfolding of pattern alternatives that can be translated into switch table in genicode. This way pattern matcher can keep simple patterns as-is and let backend handle translation of them instead of generating bunch of LabelDefs and jumps. Review by @dragos or @magarciaEPFL as both seem to know genicode very well. --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 9 +++++++++ 1 file changed, 9 insertions(+) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 3d650ef753..3baff7da9e 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -1072,6 +1072,15 @@ abstract class GenICode extends SubComponent { targets = tmpCtx.bb :: targets case Ident(nme.WILDCARD) => default = tmpCtx.bb + case Alternative(alts) => + alts foreach { + case Literal(value) => + tags = value.intValue :: tags + targets = tmpCtx.bb :: targets + case _ => + abort("Invalid case in alternative in switch-like pattern match: " + + tree + " at: " + tree.pos) + } case _ => abort("Invalid case statement in switch-like pattern match: " + tree + " at: " + (tree.pos)) -- cgit v1.2.3 From c0e87de8e9421cc2e03a066f307206f967fe518a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 07:52:55 -0800 Subject: Tweak to repl debugging output. --- src/compiler/scala/tools/nsc/interpreter/IMain.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 567d6c2f78..56bb72ca6f 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -831,7 +831,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends case xs => sys.error("Internal error: eval object " + evalClass + ", " + xs.mkString("\n", "\n", "")) } private def compileAndSaveRun(label: String, code: String) = { - showCodeIfDebugging(code) + showCodeIfDebugging(packaged(code)) val (success, run) = compileSourcesKeepingRun(new BatchSourceFile(label, packaged(code))) lastRun = run success -- cgit v1.2.3 From fe289dc0fd8172012e4d57d09658e2dfd0a4cdcf Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 1 Feb 2012 16:50:31 +0100 Subject: Add support for combiner sharing to parallel collections. Concurrent (thread-safe) collections can implement builders directly for parallel collections by having themselves as builders, similar to buffers being builders for themselves in sequential collections. Combining 2 shared combiners can be expensive (more than logn), but is never done, since `combine` is always called with `this` (receiver) combiner, so `combine` just returns `this`. This commit adds support for implementing a parallel collection by having itself as combiner (parallel builder). To do this, clients must now merely implement their combiners by overriding the method `canBeShared` and having it return `true`. 
The support is implemented by refining the abstraction which creates combiners in tasks - this abstraction is provided by the protected `combinerFactory` method, previously `cbfactory`. The essential difference is that if the combiner can be shared, this method returns a combiner factory which always returns the same combiner. Otherwise, it always creates a new combiner. Span and takeWhile had to be modified for shared combiners to make this work, but for all other collections they work as before. Several other tasks had to be modified slightly, as well. --- .../scala/collection/parallel/Combiner.scala | 9 +- .../collection/parallel/ParIterableLike.scala | 277 ++++++++++++--------- .../scala/collection/parallel/ParSeqLike.scala | 11 +- .../scala/collection/parallel/package.scala | 15 +- 4 files changed, 193 insertions(+), 119 deletions(-) diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index d1453c9ce9..a2cab7eb5d 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -62,7 +62,14 @@ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { * @return the parallel builder containing both the elements of this and the `other` builder */ def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] - + + /** Returns `true` if this combiner has a thread-safe `+=` and is meant to be shared + * across several threads constructing the collection. + * + * By default, this method returns `false`. + */ + def canBeShared: Boolean = false + } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 390bd72ab5..75f4552076 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -165,13 +165,13 @@ extends GenIterableLike[T, Repr] with HasNewCombiner[T, Repr] { self: ParIterableLike[T, Repr, Sequential] => - + import tasksupport._ - + def seq: Sequential def repr: Repr = this.asInstanceOf[Repr] - + /** Parallel iterators are split iterators that have additional accessor and * transformer methods defined in terms of methods `next` and `hasNext`. * When creating a new parallel collection, one might want to override these @@ -189,7 +189,7 @@ self: ParIterableLike[T, Repr, Sequential] => def repr = self.repr def split: Seq[IterableSplitter[T]] } - + /** A stackable modification that ensures signal contexts get passed along the iterators. * A self-type requirement in `ParIterator` ensures that this trait gets mixed into * concrete iterators. @@ -211,7 +211,7 @@ self: ParIterableLike[T, Repr, Sequential] => def hasDefiniteSize = true def nonEmpty = size != 0 - + /** Creates a new parallel iterator used to traverse the elements of this parallel collection. * This iterator is more specific than the iterator of the returned by `iterator`, and augmented * with additional accessor and transformer methods. 
@@ -293,7 +293,7 @@ self: ParIterableLike[T, Repr, Sequential] => trait SignallingOps[PI <: DelegatedSignalling] { def assign(cntx: Signalling): PI } - + /* convenience task operations wrapper */ protected implicit def task2ops[R, Tp](tsk: SSCTask[R, Tp]) = new TaskOps[R, Tp] { def mapResult[R1](mapping: R => R1): ResultMapping[R, Tp, R1] = new ResultMapping[R, Tp, R1](tsk) { @@ -321,7 +321,7 @@ self: ParIterableLike[T, Repr, Sequential] => it } } - + protected implicit def builder2ops[Elem, To](cb: Builder[Elem, To]) = new BuilderOps[Elem, To] { def ifIs[Cmb](isbody: Cmb => Unit) = new Otherwise[Cmb] { def otherwise(notbody: => Unit)(implicit m: ClassManifest[Cmb]) { @@ -331,12 +331,12 @@ self: ParIterableLike[T, Repr, Sequential] => def isCombiner = cb.isInstanceOf[Combiner[_, _]] def asCombiner = cb.asInstanceOf[Combiner[Elem, To]] } - + protected[this] def bf2seq[S, That](bf: CanBuildFrom[Repr, S, That]) = new CanBuildFrom[Sequential, S, That] { def apply(from: Sequential) = bf.apply(from.par.asInstanceOf[Repr]) // !!! we only use this on `this.seq`, and know that `this.seq.par.getClass == this.getClass` def apply() = bf.apply() } - + protected[this] def sequentially[S, That <: Parallel](b: Sequential => Parallelizable[S, That]) = b(seq).par.asInstanceOf[Repr] def mkString(start: String, sep: String, end: String): String = seq.mkString(start, sep, end) @@ -346,7 +346,7 @@ self: ParIterableLike[T, Repr, Sequential] => def mkString: String = seq.mkString("") override def toString = seq.mkString(stringPrefix + "(", ", ", ")") - + def canEqual(other: Any) = true /** Reduces the elements of this sequence using the specified associative binary operator. @@ -383,7 +383,7 @@ self: ParIterableLike[T, Repr, Sequential] => * the elements if the collection is nonempty, and `None` otherwise. */ def reduceOption[U >: T](op: (U, U) => U): Option[U] = if (isEmpty) None else Some(reduce(op)) - + /** Folds the elements of this sequence using the specified associative binary operator. * The order in which the elements are reduced is unspecified and may be nondeterministic. * @@ -434,15 +434,11 @@ self: ParIterableLike[T, Repr, Sequential] => def aggregate[S](z: S)(seqop: (S, T) => S, combop: (S, S) => S): S = { executeAndWaitResult(new Aggregate(z, seqop, combop, splitter)) } - - def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op) - - def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op) - + def foldLeft[S](z: S)(op: (S, T) => S): S = seq.foldLeft(z)(op) - + def foldRight[S](z: S)(op: (T, S) => S): S = seq.foldRight(z)(op) - + def reduceLeft[U >: T](op: (U, T) => U): U = seq.reduceLeft(op) def reduceRight[U >: T](op: (T, U) => U): U = seq.reduceRight(op) @@ -451,20 +447,6 @@ self: ParIterableLike[T, Repr, Sequential] => def reduceRightOption[U >: T](op: (T, U) => U): Option[U] = seq.reduceRightOption(op) - /* - /** Applies a function `f` to all the elements of $coll. Does so in a nondefined order, - * and in parallel. - * - * $undefinedorder - * - * @tparam U the result type of the function applied to each element, which is always discarded - * @param f function applied to each element - */ - def pareach[U](f: T => U): Unit = { - executeAndWaitResult(new Foreach(f, splitter)) - } - */ - /** Applies a function `f` to all the elements of $coll in a sequential order. 
* * @tparam U the result type of the function applied to each element, which is always discarded @@ -507,21 +489,21 @@ self: ParIterableLike[T, Repr, Sequential] => } def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new Map[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.map(f)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new Map[S, That](f, pbf, splitter) mapResult { _.result }) } otherwise seq.map(f)(bf2seq(bf))*/ def collect[S, That](pf: PartialFunction[T, S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new Collect[S, That](pf, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new Collect[S, That](pf, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.collect(pf)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new Collect[S, That](pf, pbf, splitter) mapResult { _.result }) } otherwise seq.collect(pf)(bf2seq(bf))*/ def flatMap[S, That](f: T => GenTraversableOnce[S])(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { - executeAndWaitResult(new FlatMap[S, That](f, () => bf(repr).asCombiner, splitter) mapResult { _.result }) + executeAndWaitResult(new FlatMap[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.flatMap(f)(bf2seq(bf)) /*bf ifParallel { pbf => executeAndWaitResult(new FlatMap[S, That](f, pbf, splitter) mapResult { _.result }) @@ -563,17 +545,48 @@ self: ParIterableLike[T, Repr, Sequential] => def find(pred: T => Boolean): Option[T] = { executeAndWaitResult(new Find(pred, splitter assign new DefaultSignalling with VolatileAbort)) } - - protected[this] def cbfactory ={ - () => newCombiner + + /** Creates a combiner factory. Each combiner factory instance is used + * once per invocation of a parallel transformer method for a single + * collection. + * + * The default combiner factory creates a new combiner every time it + * is requested, unless the combiner is thread-safe as indicated by its + * `canBeShared` method. In this case, the method returns a factory which + * returns the same combiner each time. This is typically done for + * concurrent parallel collections, the combiners of which allow + * thread safe access. 
+ */ + protected[this] def combinerFactory = { + val combiner = newCombiner + if (combiner.canBeShared) new CombinerFactory[T, Repr] { + val shared = combiner + def apply() = shared + def doesShareCombiners = true + } else new CombinerFactory[T, Repr] { + def apply() = newCombiner + def doesShareCombiners = false + } } - + + protected[this] def combinerFactory[S, That](cbf: () => Combiner[S, That]) = { + val combiner = cbf() + if (combiner.canBeShared) new CombinerFactory[S, That] { + val shared = combiner + def apply() = shared + def doesShareCombiners = true + } else new CombinerFactory[S, That] { + def apply() = cbf() + def doesShareCombiners = false + } + } + def filter(pred: T => Boolean): Repr = { - executeAndWaitResult(new Filter(pred, cbfactory, splitter) mapResult { _.result }) + executeAndWaitResult(new Filter(pred, combinerFactory, splitter) mapResult { _.result }) } def filterNot(pred: T => Boolean): Repr = { - executeAndWaitResult(new FilterNot(pred, cbfactory, splitter) mapResult { _.result }) + executeAndWaitResult(new FilterNot(pred, combinerFactory, splitter) mapResult { _.result }) } def ++[U >: T, That](that: GenTraversableOnce[U])(implicit bf: CanBuildFrom[Repr, U, That]): That = { @@ -581,9 +594,10 @@ self: ParIterableLike[T, Repr, Sequential] => // println("case both are parallel") val other = that.asParIterable val pbf = bf.asParallel - val copythis = new Copy(() => pbf(repr), splitter) + val cfactory = combinerFactory(() => pbf(repr)) + val copythis = new Copy(cfactory, splitter) val copythat = wrap { - val othtask = new other.Copy(() => pbf(self.repr), other.splitter) + val othtask = new other.Copy(cfactory, other.splitter) tasksupport.executeAndWaitResult(othtask) } val task = (copythis parallel copythat) { _ combine _ } mapResult { @@ -593,7 +607,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else if (bf.isParallel) { // println("case parallel builder, `that` not parallel") val pbf = bf.asParallel - val copythis = new Copy(() => pbf(repr), splitter) + val copythis = new Copy(combinerFactory(() => pbf(repr)), splitter) val copythat = wrap { val cb = pbf(repr) for (elem <- that.seq) cb += elem @@ -610,19 +624,19 @@ self: ParIterableLike[T, Repr, Sequential] => } def partition(pred: T => Boolean): (Repr, Repr) = { - executeAndWaitResult(new Partition(pred, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) }) + executeAndWaitResult(new Partition(pred, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) }) } def groupBy[K](f: T => K): immutable.ParMap[K, Repr] = { executeAndWaitResult(new GroupBy(f, () => HashMapCombiner[K, T], splitter) mapResult { - rcb => rcb.groupByKey(cbfactory) + rcb => rcb.groupByKey(() => combinerFactory()) }) } def take(n: Int): Repr = { val actualn = if (size > n) n else size if (actualn < MIN_FOR_COPY) take_sequential(actualn) - else executeAndWaitResult(new Take(actualn, cbfactory, splitter) mapResult { + else executeAndWaitResult(new Take(actualn, combinerFactory, splitter) mapResult { _.result }) } @@ -642,7 +656,7 @@ self: ParIterableLike[T, Repr, Sequential] => def drop(n: Int): Repr = { val actualn = if (size > n) n else size if ((size - actualn) < MIN_FOR_COPY) drop_sequential(actualn) - else executeAndWaitResult(new Drop(actualn, cbfactory, splitter) mapResult { _.result }) + else executeAndWaitResult(new Drop(actualn, combinerFactory, splitter) mapResult { _.result }) } private def drop_sequential(n: Int) = { @@ -657,7 +671,7 @@ self: ParIterableLike[T, Repr, Sequential] => 
val from = unc_from min size max 0 val until = unc_until min size max from if ((until - from) <= MIN_FOR_COPY) slice_sequential(from, until) - else executeAndWaitResult(new Slice(from, until, cbfactory, splitter) mapResult { _.result }) + else executeAndWaitResult(new Slice(from, until, combinerFactory, splitter) mapResult { _.result }) } private def slice_sequential(from: Int, until: Int): Repr = { @@ -672,7 +686,7 @@ self: ParIterableLike[T, Repr, Sequential] => } def splitAt(n: Int): (Repr, Repr) = { - executeAndWaitResult(new SplitAt(n, cbfactory, splitter) mapResult { p => (p._1.result, p._2.result) }) + executeAndWaitResult(new SplitAt(n, combinerFactory, combinerFactory, splitter) mapResult { p => (p._1.result, p._2.result) }) } /** Computes a prefix scan of the elements of the collection. @@ -694,7 +708,7 @@ self: ParIterableLike[T, Repr, Sequential] => val cbf = bf.asParallel if (parallelismLevel > 1) { if (size > 0) executeAndWaitResult(new CreateScanTree(0, size, z, op, splitter) mapResult { - tree => executeAndWaitResult(new FromScanTree(tree, z, op, cbf) mapResult { + tree => executeAndWaitResult(new FromScanTree(tree, z, op, combinerFactory(() => cbf(repr))) mapResult { cb => cb.result }) }) else (cbf(self.repr) += z).result @@ -714,9 +728,15 @@ self: ParIterableLike[T, Repr, Sequential] => * @return the longest prefix of this $coll of elements that satisy the predicate `pred` */ def takeWhile(pred: T => Boolean): Repr = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new TakeWhile(0, pred, cbfactory, splitter assign cntx) mapResult { _._1.result }) + val cbf = combinerFactory + if (cbf.doesShareCombiners) { + val parseqspan = toSeq.takeWhile(pred) + executeAndWaitResult(new Copy(combinerFactory, parseqspan.splitter) mapResult { _.result }) + } else { + val cntx = new DefaultSignalling with AtomicIndexFlag + cntx.setIndexFlag(Int.MaxValue) + executeAndWaitResult(new TakeWhile(0, pred, combinerFactory, splitter assign cntx) mapResult { _._1.result }) + } } /** Splits this $coll into a prefix/suffix pair according to a predicate. 
@@ -729,11 +749,22 @@ self: ParIterableLike[T, Repr, Sequential] => * the elements satisfy `pred`, and the rest of the collection */ def span(pred: T => Boolean): (Repr, Repr) = { - val cntx = new DefaultSignalling with AtomicIndexFlag - cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { - p => (p._1.result, p._2.result) - }) + val cbf = combinerFactory + if (cbf.doesShareCombiners) { + val (xs, ys) = toSeq.span(pred) + val copyxs = new Copy(combinerFactory, xs.splitter) mapResult { _.result } + val copyys = new Copy(combinerFactory, ys.splitter) mapResult { _.result } + val copyall = (copyxs parallel copyys) { + (xr, yr) => (xr, yr) + } + executeAndWaitResult(copyall) + } else { + val cntx = new DefaultSignalling with AtomicIndexFlag + cntx.setIndexFlag(Int.MaxValue) + executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { + p => (p._1.result, p._2.result) + }) + } } /** Drops all elements in the longest prefix of elements that satisfy the predicate, @@ -749,7 +780,7 @@ self: ParIterableLike[T, Repr, Sequential] => def dropWhile(pred: T => Boolean): Repr = { val cntx = new DefaultSignalling with AtomicIndexFlag cntx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new Span(0, pred, cbfactory, splitter assign cntx) mapResult { _._2.result }) + executeAndWaitResult(new Span(0, pred, combinerFactory, combinerFactory, splitter assign cntx) mapResult { _._2.result }) } def copyToArray[U >: T](xs: Array[U]) = copyToArray(xs, 0) @@ -765,7 +796,7 @@ self: ParIterableLike[T, Repr, Sequential] => def zip[U >: T, S, That](that: GenIterable[S])(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) { val pbf = bf.asParallel val thatseq = that.asParSeq - executeAndWaitResult(new Zip(pbf, splitter, thatseq.splitter) mapResult { _.result }); + executeAndWaitResult(new Zip(combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result }); } else seq.zip(that)(bf2seq(bf)) def zipWithIndex[U >: T, That](implicit bf: CanBuildFrom[Repr, (U, Int), That]): That = this zip immutable.ParRange(0, size, 1, false) @@ -773,15 +804,15 @@ self: ParIterableLike[T, Repr, Sequential] => def zipAll[S, U >: T, That](that: GenIterable[S], thisElem: U, thatElem: S)(implicit bf: CanBuildFrom[Repr, (U, S), That]): That = if (bf.isParallel && that.isParSeq) { val pbf = bf.asParallel val thatseq = that.asParSeq - executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, pbf, splitter, thatseq.splitter) mapResult { _.result }); + executeAndWaitResult(new ZipAll(size max thatseq.length, thisElem, thatElem, combinerFactory(() => pbf(repr)), splitter, thatseq.splitter) mapResult { _.result }); } else seq.zipAll(that, thisElem, thatElem)(bf2seq(bf)) protected def toParCollection[U >: T, That](cbf: () => Combiner[U, That]): That = { - executeAndWaitResult(new ToParCollection(cbf, splitter) mapResult { _.result }); + executeAndWaitResult(new ToParCollection(combinerFactory(cbf), splitter) mapResult { _.result }); } protected def toParMap[K, V, That](cbf: () => Combiner[(K, V), That])(implicit ev: T <:< (K, V)): That = { - executeAndWaitResult(new ToParMap(cbf, splitter)(ev) mapResult { _.result }) + executeAndWaitResult(new ToParMap(combinerFactory(cbf), splitter)(ev) mapResult { _.result }) } def view = new ParIterableView[T, Repr, Sequential] { @@ -869,7 +900,7 @@ self: ParIterableLike[T, Repr, Sequential] => /** Sequentially performs one 
task after another. */ protected[this] abstract class SeqComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) + (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { executeAndWaitResult(ft) @@ -880,7 +911,7 @@ self: ParIterableLike[T, Repr, Sequential] => /** Performs two tasks in parallel, and waits for both to finish. */ protected[this] abstract class ParComposite[FR, SR, R, First <: StrictSplitterCheckTask[FR, _], Second <: StrictSplitterCheckTask[SR, _]] - (f: First, s: Second) + (f: First, s: Second) extends Composite[FR, SR, R, First, Second](f, s) { def leaf(prevr: Option[R]) = { val ftfuture = execute(ft) @@ -903,16 +934,18 @@ self: ParIterableLike[T, Repr, Sequential] => } override def requiresStrictSplitters = inner.requiresStrictSplitters } - + protected trait Transformer[R, Tp] extends Accessor[R, Tp] - - protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) extends Accessor[Unit, Foreach[S]] { + + protected[this] class Foreach[S](op: T => S, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Unit, Foreach[S]] { @volatile var result: Unit = () def leaf(prevr: Option[Unit]) = pit.foreach(op) protected[this] def newSubtask(p: IterableSplitter[T]) = new Foreach[S](op, p) } - protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Int, Count] { + protected[this] class Count(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Int, Count] { // val pittxt = pit.toString @volatile var result: Int = 0 def leaf(prevr: Option[Int]) = result = pit.count(pred) @@ -920,8 +953,9 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Count) = result = result + that.result // override def toString = "CountTask(" + pittxt + ")" } - - protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Reduce[U]] { + + protected[this] class Reduce[U >: T](op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Reduce[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.reduce(op)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Reduce(op, p) @@ -931,7 +965,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Fold[U]] { + protected[this] class Fold[U >: T](z: U, op: (U, U) => U, protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, Fold[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.fold(z)(op) protected[this] def newSubtask(p: IterableSplitter[T]) = new Fold(z, op, p) @@ -946,21 +981,24 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Aggregate[S]) = result = combop(result, that.result) } - protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Sum[U]] { + protected[this] class Sum[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, Sum[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.sum(num) protected[this] def newSubtask(p: 
IterableSplitter[T]) = new Sum(num, p) override def merge(that: Sum[U]) = result = num.plus(result, that.result) } - protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[U, Product[U]] { + protected[this] class Product[U >: T](num: Numeric[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[U, Product[U]] { @volatile var result: U = null.asInstanceOf[U] def leaf(prevr: Option[U]) = result = pit.product(num) protected[this] def newSubtask(p: IterableSplitter[T]) = new Product(num, p) override def merge(that: Product[U]) = result = num.times(result, that.result) } - protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Min[U]] { + protected[this] class Min[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Min[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.min(ord)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Min(ord, p) @@ -970,7 +1008,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Max[U]] { + protected[this] class Max[U >: T](ord: Ordering[U], protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Max[U]] { @volatile var result: Option[U] = None def leaf(prevr: Option[Option[U]]) = if (pit.remaining > 0) result = Some(pit.max(ord)) protected[this] def newSubtask(p: IterableSplitter[T]) = new Max(ord, p) @@ -980,16 +1019,16 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Map[S, That](f: T => S, pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + protected[this] class Map[S, That](f: T => S, cbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], Map[S, That]] { @volatile var result: Combiner[S, That] = null - def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, pbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, pbf, p) + def leaf(prev: Option[Combiner[S, That]]) = result = pit.map2combiner(f, reuse(prev, cbf())) + protected[this] def newSubtask(p: IterableSplitter[T]) = new Map(f, cbf, p) override def merge(that: Map[S, That]) = result = result combine that.result } protected[this] class Collect[S, That] - (pf: PartialFunction[T, S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + (pf: PartialFunction[T, S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], Collect[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = pit.collect2combiner[S, That](pf, pbf()) @@ -998,7 +1037,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class FlatMap[S, That] - (f: T => GenTraversableOnce[S], pbf: () => Combiner[S, That], protected[this] val pit: IterableSplitter[T]) + (f: T => GenTraversableOnce[S], pbf: CombinerFactory[S, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[S, That], FlatMap[S, That]] { @volatile var result: Combiner[S, That] = null def leaf(prev: Option[Combiner[S, That]]) = result = 
pit.flatmap2combiner(f, pbf()) @@ -1010,28 +1049,31 @@ self: ParIterableLike[T, Repr, Sequential] => } } - protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Forall] { + protected[this] class Forall(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Boolean, Forall] { @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.forall(pred); if (result == false) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Forall(pred, p) override def merge(that: Forall) = result = result && that.result } - protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Boolean, Exists] { + protected[this] class Exists(pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Boolean, Exists] { @volatile var result: Boolean = false def leaf(prev: Option[Boolean]) = { if (!pit.isAborted) result = pit.exists(pred); if (result == true) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Exists(pred, p) override def merge(that: Exists) = result = result || that.result } - protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) extends Accessor[Option[U], Find[U]] { + protected[this] class Find[U >: T](pred: T => Boolean, protected[this] val pit: IterableSplitter[T]) + extends Accessor[Option[U], Find[U]] { @volatile var result: Option[U] = None def leaf(prev: Option[Option[U]]) = { if (!pit.isAborted) result = pit.find(pred); if (result != None) pit.abort } protected[this] def newSubtask(p: IterableSplitter[T]) = new Find(pred, p) override def merge(that: Find[U]) = if (this.result == None) result = that.result } - protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Filter[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Filter[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1041,7 +1083,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: Filter[U, This]) = result = result combine that.result } - protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class FilterNot[U >: T, This >: Repr](pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], FilterNot[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1051,7 +1093,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: FilterNot[U, This]) = result = result combine that.result } - protected class Copy[U >: T, That](cfactory: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T]) + protected class Copy[U >: T, That](cfactory: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, That], Copy[U, That]] { @volatile var result: Combiner[U, That] = null def leaf(prev: Option[Combiner[U, That]]) = result = pit.copy2builder[U, That, Combiner[U, That]](reuse(prev, cfactory())) @@ -1059,11 +1101,12 @@ self: 
ParIterableLike[T, Repr, Sequential] => override def merge(that: Copy[U, That]) = result = result combine that.result } - protected[this] class Partition[U >: T, This >: Repr](pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Partition[U >: T, This >: Repr] + (pred: T => Boolean, cbfTrue: CombinerFactory[U, This], cbfFalse: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), Partition[U, This]] { @volatile var result: (Combiner[U, This], Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf())) - protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbf, p) + def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.partition2combiners(pred, reuse(prev.map(_._1), cbfTrue()), reuse(prev.map(_._2), cbfFalse())) + protected[this] def newSubtask(p: IterableSplitter[T]) = new Partition(pred, cbfTrue, cbfFalse, p) override def merge(that: Partition[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) } @@ -1090,7 +1133,8 @@ self: ParIterableLike[T, Repr, Sequential] => } } - protected[this] class Take[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Take[U >: T, This >: Repr] + (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Take[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = { @@ -1109,7 +1153,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Drop[U >: T, This >: Repr](n: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Drop[U >: T, This >: Repr] + (n: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Drop[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf())) @@ -1126,7 +1171,8 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Slice[U >: T, This >: Repr](from: Int, until: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class Slice[U >: T, This >: Repr] + (from: Int, until: Int, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, This], Slice[U, This]] { @volatile var result: Combiner[U, This] = null def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf())) @@ -1144,22 +1190,23 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class SplitAt[U >: T, This >: Repr](at: Int, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + protected[this] class SplitAt[U >: T, This >: Repr] + (at: Int, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), SplitAt[U, This]] { @volatile var result: (Combiner[U, This], 
Combiner[U, This]) = null - def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbf()), reuse(prev.map(_._2), cbf())) + def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { val pits = pit.split val sizes = pits.scanLeft(0)(_ + _.remaining) - for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbf, p) + for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p) } override def merge(that: SplitAt[U, This]) = result = (result._1 combine that.result._1, result._2 combine that.result._2) override def requiresStrictSplitters = true } protected[this] class TakeWhile[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + (pos: Int, pred: T => Boolean, cbf: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Boolean), TakeWhile[U, This]] { @volatile var result: (Combiner[U, This], Boolean) = null def leaf(prev: Option[(Combiner[U, This], Boolean)]) = if (pos < pit.indexFlag) { @@ -1178,23 +1225,23 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class Span[U >: T, This >: Repr] - (pos: Int, pred: T => Boolean, cbf: () => Combiner[U, This], protected[this] val pit: IterableSplitter[T]) + (pos: Int, pred: T => Boolean, cbfBefore: CombinerFactory[U, This], cbfAfter: CombinerFactory[U, This], protected[this] val pit: IterableSplitter[T]) extends Transformer[(Combiner[U, This], Combiner[U, This]), Span[U, This]] { @volatile var result: (Combiner[U, This], Combiner[U, This]) = null def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = if (pos < pit.indexFlag) { // val lst = pit.toList // val pa = mutable.ParArray(lst: _*) // val str = "At leaf we will iterate: " + pa.splitter.toList - result = pit.span2combiners(pred, cbf(), cbf()) // do NOT reuse old combiners here, lest ye be surprised + result = pit.span2combiners(pred, cbfBefore(), cbfAfter()) // do NOT reuse old combiners here, lest ye be surprised // println("\nAt leaf result is: " + result) if (result._2.size > 0) pit.setIndexFlagIfLesser(pos) } else { - result = (reuse(prev.map(_._2), cbf()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbf()))) + result = (reuse(prev.map(_._2), cbfBefore()), pit.copy2builder[U, This, Combiner[U, This]](reuse(prev.map(_._2), cbfAfter()))) } protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { val pits = pit.split - for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbf, p) + for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p) } override def merge(that: Span[U, This]) = result = if (result._2.size == 0) { (result._1 combine that.result._1, that.result._2) @@ -1204,10 +1251,10 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class Zip[U >: T, S, That](pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: 
SeqSplitter[S]) + protected[this] class Zip[U >: T, S, That](pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) extends Transformer[Combiner[(U, S), That], Zip[U, S, That]] { @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf(self.repr)) + def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = { val pits = pit.split @@ -1220,10 +1267,10 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] class ZipAll[U >: T, S, That] - (len: Int, thiselem: U, thatelem: S, pbf: CanCombineFrom[Repr, (U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) + (len: Int, thiselem: U, thatelem: S, pbf: CombinerFactory[(U, S), That], protected[this] val pit: IterableSplitter[T], val othpit: SeqSplitter[S]) extends Transformer[Combiner[(U, S), That], ZipAll[U, S, That]] { @volatile var result: Result = null - def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf(self.repr)) + def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = if (pit.remaining <= len) { val pits = pit.split @@ -1257,7 +1304,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def requiresStrictSplitters = true } - protected[this] class ToParCollection[U >: T, That](cbf: () => Combiner[U, That], protected[this] val pit: IterableSplitter[T]) + protected[this] class ToParCollection[U >: T, That](cbf: CombinerFactory[U, That], protected[this] val pit: IterableSplitter[T]) extends Transformer[Combiner[U, That], ToParCollection[U, That]] { @volatile var result: Result = null def leaf(prev: Option[Combiner[U, That]]) { @@ -1268,7 +1315,7 @@ self: ParIterableLike[T, Repr, Sequential] => override def merge(that: ToParCollection[U, That]) = result = result combine that.result } - protected[this] class ToParMap[K, V, That](cbf: () => Combiner[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) + protected[this] class ToParMap[K, V, That](cbf: CombinerFactory[(K, V), That], protected[this] val pit: IterableSplitter[T])(implicit ev: T <:< (K, V)) extends Transformer[Combiner[(K, V), That], ToParMap[K, V, That]] { @volatile var result: Result = null def leaf(prev: Option[Combiner[(K, V), That]]) { @@ -1315,13 +1362,13 @@ self: ParIterableLike[T, Repr, Sequential] => } else result = that.result override def requiresStrictSplitters = true } - + protected[this] class FromScanTree[U >: T, That] - (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CanCombineFrom[Repr, U, That]) + (tree: ScanTree[U], z: U, op: (U, U) => U, cbf: CombinerFactory[U, That]) extends StrictSplitterCheckTask[Combiner[U, That], FromScanTree[U, That]] { @volatile var result: Combiner[U, That] = null def leaf(prev: Option[Combiner[U, That]]) { - val cb = reuse(prev, cbf(self.repr)) + val cb = reuse(prev, cbf()) iterate(tree, cb) result = cb } @@ -1391,7 +1438,13 @@ self: ParIterableLike[T, Repr, Sequential] => def rightmost = this def print(depth: Int) = println((" " * depth) + this) } - + + /* alias methods */ + + def /:[S](z: S)(op: (S, T) => S): S = foldLeft(z)(op); + + def :\[S](z: S)(op: (T, S) => S): S = foldRight(z)(op); + /* debug information */ 
private[parallel] def debugInformation = "Parallel collection: " + this.getClass diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index d0f38b30dc..22c587b498 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -213,9 +213,9 @@ self => } otherwise seq.sameElements(that) /** Tests whether this $coll ends with the given parallel sequence. - * + * * $abortsignalling - * + * * @tparam S the type of the elements of `that` sequence * @param that the sequence to test * @return `true` if this $coll has `that` as a suffix, `false` otherwise @@ -236,12 +236,13 @@ self => val that = patch.asParSeq val pbf = bf.asParallel val pits = splitter.psplit(from, replaced, length - from - realreplaced) - val copystart = new Copy[U, That](() => pbf(repr), pits(0)) + val cfactory = combinerFactory(() => pbf(repr)) + val copystart = new Copy[U, That](cfactory, pits(0)) val copymiddle = wrap { - val tsk = new that.Copy[U, That](() => pbf(repr), that.splitter) + val tsk = new that.Copy[U, That](cfactory, that.splitter) tasksupport.executeAndWaitResult(tsk) } - val copyend = new Copy[U, That](() => pbf(repr), pits(2)) + val copyend = new Copy[U, That](cfactory, pits(2)) executeAndWaitResult(((copystart parallel copymiddle) { _ combine _ } parallel copyend) { _ combine _ } mapResult { _.result }) diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index f152629c50..f154019bac 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -83,6 +83,7 @@ package object parallel { } } + package parallel { trait FactoryOps[From, Elem, To] { trait Otherwise[R] { @@ -113,7 +114,19 @@ package parallel { } /* classes */ - + + trait CombinerFactory[U, Repr] { + /** Provides a combiner used to construct a collection. */ + def apply(): Combiner[U, Repr] + /** The call to the `apply` method can create a new combiner each time. + * If it does, this method returns `false`. + * The same combiner factory may be used each time (typically, this is + * the case for concurrent collections, which are thread safe). + * If so, the method returns `true`. + */ + def doesShareCombiners: Boolean + } + /** Composite throwable - thrown when multiple exceptions are thrown at the same time. */ final case class CompositeThrowable( val throwables: Set[Throwable] -- cgit v1.2.3 From 97912733f9e7e2c2528ebbab6b70ef35b8dd0fbc Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 25 Jan 2012 18:42:24 +0100 Subject: Get rid of unused import. 
--- src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 44579400ff..0b35f1b1d0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -1138,7 +1138,7 @@ defined class Foo */ case (btm@BodyTreeMaker(body, _)) :: Nil => Some(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body))) // constant - case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => import CODE._ + case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => Some(CaseDef(const, EmptyTree, btm.substitution(body))) // alternatives case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty) -- cgit v1.2.3 From aa7759651d25ab8c315a2d36e3f28cf3caaa041f Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 25 Jan 2012 18:42:52 +0100 Subject: Generate default case for switches. --- .../scala/tools/nsc/typechecker/PatMatVirtualiser.scala | 13 ++++++++++++- 1 file changed, 12 insertions(+), 1 deletion(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 0b35f1b1d0..cf5985eeee 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -1160,9 +1160,20 @@ defined class Foo */ sequence(caseDefs) map { caseDefs => import CODE._ + val caseDefsWithDefault = { + def isDefault(x: CaseDef): Boolean = x match { + case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true + case _ => false + } + val hasDefault = caseDefs exists isDefault + if (hasDefault) caseDefs else { + val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) } + caseDefs :+ default + } + } val matcher = BLOCK( VAL(scrutSym) === scrut, // TODO: type test for switchable type if patterns allow switch but the scrutinee doesn't - Match(REF(scrutSym), caseDefs) // match on scrutSym, not scrut to avoid duplicating scrut + Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not scrut to avoid duplicating scrut ) // matcher filter (tree => tree.tpe == null) foreach println -- cgit v1.2.3 From 39457f6c85fc9764d714d52317edcd4300fd82b8 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 1 Feb 2012 17:26:11 +0100 Subject: Convert values to Int in switchable patterns. Further improvements to how -Yvirtpatmat handles switch-like patterns that can be translated to switch tables. First of all, we added a check whether a type of an expression we pattern match on is in the set of allowed types for switch patterns. If yes, we translate a pattern to switch one by converting both an expression we pattern match on and literals in a pattern to an Int. I borrowed an idea of converting to Ints from both old pattern matcher implementation and from how javac handles it. 
--- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 62 ++++++++++++++-------- 1 file changed, 41 insertions(+), 21 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index cf5985eeee..b1e02cb062 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -1128,10 +1128,22 @@ defined class Foo */ // } // } + private val switchableTpes = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe) + def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = if (!optimizingCodeGen) None else { def sequence[T](xs: List[Option[T]]): Option[List[T]] = if (xs exists (_.isEmpty)) None else Some(xs.flatten) + def isSwitchableTpe(tpe: Type): Boolean = + switchableTpes contains tpe + def switchableConstToInt(x: Tree): Tree = { + val Literal(const) = x + const.tag match { + case IntTag => x + case ByteTag | ShortTag | CharTag => Literal(Constant(const.intValue)) + } + } + val caseDefs = cases map { makers => removeSubstOnly(makers) match { // default case (don't move this to unfold, as it may only occur on the top level, not as an alternative -- well, except in degenerate matches) @@ -1139,12 +1151,12 @@ defined class Foo */ Some(CaseDef(Ident(nme.WILDCARD), EmptyTree, btm.substitution(body))) // constant case (EqualityTestTreeMaker(_, const@SwitchablePattern(), _)) :: (btm@BodyTreeMaker(body, _)) :: Nil => - Some(CaseDef(const, EmptyTree, btm.substitution(body))) + Some(CaseDef(switchableConstToInt(const), EmptyTree, btm.substitution(body))) // alternatives case AlternativesTreeMaker(_, altss, _) :: (btm@BodyTreeMaker(body, _)) :: Nil => // assert(currLabel.isEmpty && nextLabel.isEmpty) val caseConstants = altss map { case EqualityTestTreeMaker(_, const@SwitchablePattern(), _) :: Nil => - Some(const) + Some(switchableConstToInt(const)) case _ => None } @@ -1158,27 +1170,35 @@ defined class Foo */ } } - sequence(caseDefs) map { caseDefs => - import CODE._ - val caseDefsWithDefault = { - def isDefault(x: CaseDef): Boolean = x match { - case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true - case _ => false - } - val hasDefault = caseDefs exists isDefault - if (hasDefault) caseDefs else { - val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) } - caseDefs :+ default + if (!isSwitchableTpe(scrut.tpe)) + None + else { + sequence(caseDefs) map { caseDefs => + import CODE._ + val caseDefsWithDefault = { + def isDefault(x: CaseDef): Boolean = x match { + case CaseDef(Ident(nme.WILDCARD), EmptyTree, _) => true + case _ => false + } + val hasDefault = caseDefs exists isDefault + if (hasDefault) caseDefs else { + val default = atPos(scrut.pos) { DEFAULT ==> MATCHERROR(REF(scrutSym)) } + caseDefs :+ default + } } + val matcher = BLOCK( + if (scrut.tpe != IntClass.tpe) { + scrutSym setInfo IntClass.tpe + VAL(scrutSym) === (scrut DOT nme.toInt) + } else { + VAL(scrutSym) === scrut + }, + Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not scrut to avoid duplicating scrut + ) + // matcher filter (tree => tree.tpe == null) foreach println + // treeBrowser browse matcher + matcher // set type to avoid recursion in typedMatch } - val matcher = BLOCK( - VAL(scrutSym) === scrut, // TODO: type test for switchable type if patterns allow switch but the scrutinee doesn't - Match(REF(scrutSym), caseDefsWithDefault) // match on scrutSym, not 
scrut to avoid duplicating scrut - ) - - // matcher filter (tree => tree.tpe == null) foreach println - // treeBrowser browse matcher - matcher // set type to avoid recursion in typedMatch } } -- cgit v1.2.3 From 556dc8c5406f9ab9c9470ff22d430693f00d2807 Mon Sep 17 00:00:00 2001 From: Grzegorz Kossakowski Date: Wed, 1 Feb 2012 17:32:48 +0100 Subject: Added a test-case for switches with Yvirtpatmat. Added a bunch of tests that cover changes related to switches that were applied to Yvirtpatmat implementation. Note: I didn't add those tests progressively because my changes fix trees after typer phase but do not affect resulting bytecode. How come? It's because -Yvirtpatmat will emit pattern for switches and then the old pattern matcher implementation would transform them in the old fashion in explicitouter. We cannot disable the old pattern matcher in explicitouter yet because it doesn't handle patterns used for catching exceptions. Thus, consider this as a sign of the fact that Yvirtpatmat is still work in progress. --- test/files/run/virtpatmat_switch.check | 7 +++++++ test/files/run/virtpatmat_switch.flags | 1 + test/files/run/virtpatmat_switch.scala | 32 ++++++++++++++++++++++++++++++++ 3 files changed, 40 insertions(+) create mode 100644 test/files/run/virtpatmat_switch.check create mode 100644 test/files/run/virtpatmat_switch.flags create mode 100644 test/files/run/virtpatmat_switch.scala diff --git a/test/files/run/virtpatmat_switch.check b/test/files/run/virtpatmat_switch.check new file mode 100644 index 0000000000..6ded95c010 --- /dev/null +++ b/test/files/run/virtpatmat_switch.check @@ -0,0 +1,7 @@ +zero +one +many +got a +got b +got some letter +scala.MatchError: 5 (of class java.lang.Integer) \ No newline at end of file diff --git a/test/files/run/virtpatmat_switch.flags b/test/files/run/virtpatmat_switch.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/run/virtpatmat_switch.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/run/virtpatmat_switch.scala b/test/files/run/virtpatmat_switch.scala new file mode 100644 index 0000000000..2e2c31e8e5 --- /dev/null +++ b/test/files/run/virtpatmat_switch.scala @@ -0,0 +1,32 @@ +object Test extends App { + def intSwitch(x: Int) = x match { + case 0 => "zero" + case 1 => "one" + case _ => "many" + } + + println(intSwitch(0)) + println(intSwitch(1)) + println(intSwitch(10)) + + def charSwitch(x: Char) = x match { + case 'a' => "got a" + case 'b' => "got b" + case _ => "got some letter" + } + + println(charSwitch('a')) + println(charSwitch('b')) + println(charSwitch('z')) + + def implicitDefault(x: Int) = x match { + case 0 => 0 + } + + try { + implicitDefault(5) + } catch { + case e: MatchError => println(e) + } + +} -- cgit v1.2.3 From 8aa87f15e3887dbeb1a39bfea002b56cf68c445a Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 1 Feb 2012 18:24:50 +0100 Subject: Remove ParIterator and SignalContextPassingIterator. This unclutters the namespace and makes defining custom parallel collections a lot easier. 
--- .../collection/parallel/ParIterableLike.scala | 64 +++++-------------- .../scala/collection/parallel/ParMapLike.scala | 2 - .../scala/collection/parallel/ParSeqLike.scala | 71 ++++++---------------- .../collection/parallel/RemainsIterator.scala | 38 +++++++++--- .../collection/parallel/immutable/ParHashMap.scala | 17 +++--- .../collection/parallel/immutable/ParHashSet.scala | 20 +++--- .../collection/parallel/immutable/ParRange.scala | 18 +++--- .../collection/parallel/immutable/ParVector.scala | 7 +-- .../collection/parallel/immutable/package.scala | 12 ++-- .../collection/parallel/mutable/ParArray.scala | 23 +++---- .../collection/parallel/mutable/ParHashMap.scala | 9 +-- .../collection/parallel/mutable/ParHashSet.scala | 9 +-- .../collection/parallel/mutable/ParHashTable.scala | 2 +- .../scala/collection/parallel/package.scala | 3 +- 14 files changed, 113 insertions(+), 182 deletions(-) diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 75f4552076..b24497371d 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -171,42 +171,6 @@ self: ParIterableLike[T, Repr, Sequential] => def seq: Sequential def repr: Repr = this.asInstanceOf[Repr] - - /** Parallel iterators are split iterators that have additional accessor and - * transformer methods defined in terms of methods `next` and `hasNext`. - * When creating a new parallel collection, one might want to override these - * new methods to make them more efficient. - * - * Parallel iterators are augmented with signalling capabilities. This means - * that a signalling object can be assigned to them as needed. - * - * The self-type ensures that signal context passing behaviour gets mixed in - * a concrete object instance. - */ - trait ParIterator extends IterableSplitter[T] { - me: SignalContextPassingIterator[ParIterator] => - var signalDelegate: Signalling = IdleSignalling - def repr = self.repr - def split: Seq[IterableSplitter[T]] - } - - /** A stackable modification that ensures signal contexts get passed along the iterators. - * A self-type requirement in `ParIterator` ensures that this trait gets mixed into - * concrete iterators. - */ - trait SignalContextPassingIterator[+IterRepr <: ParIterator] extends ParIterator { - // Note: This functionality must be factored out to this inner trait to avoid boilerplate. - // Also, one could omit the cast below. However, this leads to return type inconsistencies, - // due to inability to override the return type of _abstract overrides_. - // Be aware that this stackable modification has to be subclassed, so it shouldn't be rigid - // on the type of iterators it splits. - // The alternative is some boilerplate - better to tradeoff some type safety to avoid it here. 
- abstract override def split: Seq[IterRepr] = { - val pits = super.split - pits foreach { _.signalDelegate = signalDelegate } - pits.asInstanceOf[Seq[IterRepr]] - } - } def hasDefiniteSize = true @@ -870,7 +834,7 @@ self: ParIterableLike[T, Repr, Sequential] => protected[this] val pit: IterableSplitter[T] protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel) - def split = pit.split.map(newSubtask(_)) // default split procedure + def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure private[parallel] override def signalAbort = pit.abort override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" } @@ -1142,7 +1106,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, untilp) <- pits zip sizes; if untilp <= n) yield { if (untilp + p.remaining < n) new Take(p.remaining, cbf, p) @@ -1160,7 +1124,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Combiner[U, This]]) = result = pit.drop2combiner(n, reuse(prev, cbf())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, withp) <- pits zip sizes.tail; if withp >= n) yield { if (withp - p.remaining > n) new Drop(0, cbf, p) @@ -1178,7 +1142,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Combiner[U, This]]) = result = pit.slice2combiner(from, until, reuse(prev, cbf())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, untilp) <- pits zip sizes; if untilp + p.remaining >= from || untilp <= until) yield { val f = (from max untilp) - untilp @@ -1197,7 +1161,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[(Combiner[U, This], Combiner[U, This])]) = result = pit.splitAt2combiners(at, reuse(prev.map(_._1), cbfBefore()), reuse(prev.map(_._2), cbfAfter())) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.scanLeft(0)(_ + _.remaining) for ((p, untilp) <- pits zip sizes) yield new SplitAt((at max untilp min (untilp + p.remaining)) - untilp, cbfBefore, cbfAfter, p) } @@ -1215,7 +1179,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else result = (reuse(prev.map(_._1), cbf()), false) protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new TakeWhile(pos + untilp, pred, cbf, p) } override def merge(that: TakeWhile[U, This]) = if (result._2) { @@ -1240,7 +1204,7 @@ self: ParIterableLike[T, Repr, Sequential] => } protected[this] def newSubtask(p: IterableSplitter[T]) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = 
pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Span(pos + untilp, pred, cbfBefore, cbfAfter, p) } override def merge(that: Span[U, This]) = result = if (result._2.size == 0) { @@ -1257,9 +1221,9 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Result]) = result = pit.zip2combiner[U, S, That](othpit, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.map(_.remaining) - val opits = othpit.psplit(sizes: _*) + val opits = othpit.psplitWithSignalling(sizes: _*) (pits zip opits) map { p => new Zip(pbf, p._1, p._2) } } override def merge(that: Zip[U, S, That]) = result = result combine that.result @@ -1273,12 +1237,12 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Result]) = result = pit.zipAll2combiner[U, S, That](othpit, thiselem, thatelem, pbf()) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = if (pit.remaining <= len) { - val pits = pit.split + val pits = pit.splitWithSignalling val sizes = pits.map(_.remaining) - val opits = othpit.psplit(sizes: _*) + val opits = othpit.psplitWithSignalling(sizes: _*) ((pits zip opits) zip sizes) map { t => new ZipAll(t._2, thiselem, thatelem, pbf, t._1._1, t._1._2) } } else { - val opits = othpit.psplit(pit.remaining) + val opits = othpit.psplitWithSignalling(pit.remaining) val diff = len - pit.remaining Seq( new ZipAll(pit.remaining, thiselem, thatelem, pbf, pit, opits(0)), // nothing wrong will happen with the cast below - elem T is never accessed @@ -1295,7 +1259,7 @@ self: ParIterableLike[T, Repr, Sequential] => def leaf(prev: Option[Unit]) = pit.copyToArray(array, from, len) protected[this] def newSubtask(p: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining); if untilp < len) yield { val plen = p.remaining min (len - untilp) new CopyToArray[U, This](from + untilp, plen, array, p) @@ -1352,7 +1316,7 @@ self: ParIterableLike[T, Repr, Sequential] => } else trees(from) protected[this] def newSubtask(pit: IterableSplitter[T]) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield { new CreateScanTree(untilp, p.remaining, z, op, p) } diff --git a/src/library/scala/collection/parallel/ParMapLike.scala b/src/library/scala/collection/parallel/ParMapLike.scala index beb50a41e1..afd1f30903 100644 --- a/src/library/scala/collection/parallel/ParMapLike.scala +++ b/src/library/scala/collection/parallel/ParMapLike.scala @@ -66,7 +66,6 @@ self => new IterableSplitter[K] { i => val iter = s - var signalDelegate: Signalling = IdleSignalling def hasNext = iter.hasNext def next() = iter.next._1 def split = { @@ -84,7 +83,6 @@ self => new IterableSplitter[V] { i => val iter = s - var signalDelegate: Signalling = IdleSignalling def hasNext = iter.hasNext def next() = iter.next._2 def split = { diff --git a/src/library/scala/collection/parallel/ParSeqLike.scala b/src/library/scala/collection/parallel/ParSeqLike.scala index 22c587b498..6a5ee5c69b 100644 --- a/src/library/scala/collection/parallel/ParSeqLike.scala +++ b/src/library/scala/collection/parallel/ParSeqLike.scala @@ -48,35 +48,6 @@ self => type SuperParIterator = IterableSplitter[T] - /** An iterator that can be 
split into arbitrary subsets of iterators. - * The self-type requirement ensures that the signal context passing behaviour gets mixed in - * the concrete iterator instance in some concrete collection. - * - * '''Note:''' In concrete collection classes, collection implementers might want to override the iterator - * `reverse2builder` method to ensure higher efficiency. - */ - trait ParIterator extends SeqSplitter[T] with super.ParIterator { - me: SignalContextPassingIterator[ParIterator] => - def split: Seq[ParIterator] - def psplit(sizes: Int*): Seq[ParIterator] - } - - /** A stackable modification that ensures signal contexts get passed along the iterators. - * A self-type requirement in `ParIterator` ensures that this trait gets mixed into - * concrete iterators. - */ - trait SignalContextPassingIterator[+IterRepr <: ParIterator] - extends ParIterator with super.SignalContextPassingIterator[IterRepr] { - // Note: See explanation in `ParallelIterableLike.this.SignalContextPassingIterator` - // to understand why we do the cast here, and have a type parameter. - // Bottomline: avoiding boilerplate and fighting against inability to override stackable modifications. - abstract override def psplit(sizes: Int*): Seq[IterRepr] = { - val pits = super.psplit(sizes: _*) - pits foreach { _.signalDelegate = signalDelegate } - pits.asInstanceOf[Seq[IterRepr]] - } - } - /** A more refined version of the iterator found in the `ParallelIterable` trait, * this iterator can be split into arbitrary subsets of iterators. * @@ -89,9 +60,7 @@ self => override def size = length /** Used to iterate elements using indices */ - protected abstract class Elements(start: Int, val end: Int) extends ParIterator with BufferedIterator[T] { - me: SignalContextPassingIterator[ParIterator] => - + protected abstract class Elements(start: Int, val end: Int) extends SeqSplitter[T] with BufferedIterator[T] { private var i = start def hasNext = i < end @@ -106,14 +75,14 @@ self => final def remaining = end - i - def dup = new Elements(i, end) with SignalContextPassingIterator[ParIterator] + def dup = new Elements(i, end) {} def split = psplit(remaining / 2, remaining - remaining / 2) def psplit(sizes: Int*) = { val incr = sizes.scanLeft(0)(_ + _) for ((from, until) <- incr.init zip incr.tail) yield { - new Elements(start + from, (start + until) min end) with SignalContextPassingIterator[ParIterator] + new Elements(start + from, (start + until) min end) {} } } @@ -138,7 +107,7 @@ self => val realfrom = if (from < 0) 0 else from val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new SegmentLength(p, 0, splitter.psplit(realfrom, length - realfrom)(1) assign ctx))._1 + executeAndWaitResult(new SegmentLength(p, 0, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx))._1 } /** Finds the first element satisfying some predicate. @@ -156,7 +125,7 @@ self => val realfrom = if (from < 0) 0 else from val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MaxValue) - executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplit(realfrom, length - realfrom)(1) assign ctx)) + executeAndWaitResult(new IndexWhere(p, realfrom, splitter.psplitWithSignalling(realfrom, length - realfrom)(1) assign ctx)) } /** Finds the last element satisfying some predicate. 
@@ -174,7 +143,7 @@ self => val until = if (end >= length) length else end + 1 val ctx = new DefaultSignalling with AtomicIndexFlag ctx.setIndexFlag(Int.MinValue) - executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplit(until, length - until)(0) assign ctx)) + executeAndWaitResult(new LastIndexWhere(p, 0, splitter.psplitWithSignalling(until, length - until)(0) assign ctx)) } def reverse: Repr = { @@ -203,7 +172,7 @@ self => else if (pthat.length > length - offset) false else { val ctx = new DefaultSignalling with VolatileAbort - executeAndWaitResult(new SameElements(splitter.psplit(offset, pthat.length)(1) assign ctx, pthat.splitter)) + executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(offset, pthat.length)(1) assign ctx, pthat.splitter)) } } otherwise seq.startsWith(that, offset) @@ -226,7 +195,7 @@ self => else { val ctx = new DefaultSignalling with VolatileAbort val tlen = that.length - executeAndWaitResult(new SameElements(splitter.psplit(length - tlen, tlen)(1) assign ctx, pthat.splitter)) + executeAndWaitResult(new SameElements(splitter.psplitWithSignalling(length - tlen, tlen)(1) assign ctx, pthat.splitter)) } } otherwise seq.endsWith(that) @@ -235,7 +204,7 @@ self => if (patch.isParSeq && bf.isParallel && (size - realreplaced + patch.size) > MIN_FOR_COPY) { val that = patch.asParSeq val pbf = bf.asParallel - val pits = splitter.psplit(from, replaced, length - from - realreplaced) + val pits = splitter.psplitWithSignalling(from, replaced, length - from - realreplaced) val cfactory = combinerFactory(() => pbf(repr)) val copystart = new Copy[U, That](cfactory, pits(0)) val copymiddle = wrap { @@ -253,7 +222,7 @@ self => val from = 0 max fromarg val b = bf(repr) val repl = (r min (length - from)) max 0 - val pits = splitter.psplit(from, repl, length - from - repl) + val pits = splitter.psplitWithSignalling(from, repl, length - from - repl) b ++= pits(0) b ++= patch b ++= pits(2) @@ -373,7 +342,7 @@ self => } else result = (0, false) protected[this] def newSubtask(p: SuperParIterator) = throw new UnsupportedOperationException override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new SegmentLength(pred, from + untilp, p) } override def merge(that: SegmentLength) = if (result._2) result = (result._1 + that.result._1, that.result._2) @@ -392,7 +361,7 @@ self => } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(from)(_ + _.remaining)) yield new IndexWhere(pred, untilp, p) } override def merge(that: IndexWhere) = result = if (result == -1) that.result else { @@ -413,7 +382,7 @@ self => } protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(pos)(_ + _.remaining)) yield new LastIndexWhere(pred, untilp, p) } override def merge(that: LastIndexWhere) = result = if (result == -1) that.result else { @@ -438,7 +407,7 @@ self => override def merge(that: ReverseMap[S, That]) = result = that.result combine result } - protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[U]) + protected[this] class SameElements[U >: T](protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[U]) extends Accessor[Boolean, SameElements[U]] { @volatile 
var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { @@ -449,7 +418,7 @@ self => override def split = { val fp = pit.remaining / 2 val sp = pit.remaining - fp - for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new SameElements(p, op) + for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new SameElements(p, op) } override def merge(that: SameElements[U]) = result = result && that.result override def requiresStrictSplitters = true @@ -461,7 +430,7 @@ self => def leaf(prev: Option[Combiner[U, That]]) = result = pit.updated2combiner(pos, elem, pbf()) protected[this] def newSubtask(p: SuperParIterator) = unsupported override def split = { - val pits = pit.split + val pits = pit.splitWithSignalling for ((p, untilp) <- pits zip pits.scanLeft(0)(_ + _.remaining)) yield new Updated(pos - untilp, elem, pbf, p) } override def merge(that: Updated[U, That]) = result = result combine that.result @@ -476,8 +445,8 @@ self => override def split = { val fp = len / 2 val sp = len - len / 2 - val pits = pit.psplit(fp, sp) - val opits = otherpit.psplit(fp, sp) + val pits = pit.psplitWithSignalling(fp, sp) + val opits = otherpit.psplitWithSignalling(fp, sp) Seq( new Zip(fp, pbf, pits(0), opits(0)), new Zip(sp, pbf, pits(1), opits(1)) @@ -486,7 +455,7 @@ self => override def merge(that: Zip[U, S, That]) = result = result combine that.result } - protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: PreciseSplitter[S]) + protected[this] class Corresponds[S](corr: (T, S) => Boolean, protected[this] val pit: SeqSplitter[T], val otherpit: SeqSplitter[S]) extends Accessor[Boolean, Corresponds[S]] { @volatile var result: Boolean = true def leaf(prev: Option[Boolean]) = if (!pit.isAborted) { @@ -497,7 +466,7 @@ self => override def split = { val fp = pit.remaining / 2 val sp = pit.remaining - fp - for ((p, op) <- pit.psplit(fp, sp) zip otherpit.psplit(fp, sp)) yield new Corresponds(corr, p, op) + for ((p, op) <- pit.psplitWithSignalling(fp, sp) zip otherpit.psplitWithSignalling(fp, sp)) yield new Corresponds(corr, p, op) } override def merge(that: Corresponds[S]) = result = result && that.result override def requiresStrictSplitters = true diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index e04e0e9c72..e8b332da89 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -14,6 +14,7 @@ package scala.collection.parallel import scala.collection.Parallel import scala.collection.generic.Signalling import scala.collection.generic.DelegatedSignalling +import scala.collection.generic.IdleSignalling import scala.collection.generic.CanCombineFrom import scala.collection.mutable.Builder import scala.collection.Iterator.empty @@ -380,12 +381,20 @@ extends AugmentedIterableIterator[T] with DelegatedSignalling { self => - + + var signalDelegate: Signalling = IdleSignalling + /** Creates a copy of this iterator. */ def dup: IterableSplitter[T] def split: Seq[IterableSplitter[T]] - + + def splitWithSignalling: Seq[IterableSplitter[T]] = { + val pits = split + pits foreach { _.signalDelegate = signalDelegate } + pits + } + /** The number of elements this iterator has yet to traverse. This method * doesn't change the state of the iterator. 
* @@ -421,7 +430,6 @@ self => /* iterator transformers */ class Taken(taken: Int) extends IterableSplitter[T] { - var signalDelegate = self.signalDelegate var remaining = taken min self.remaining def hasNext = remaining > 0 def next = { remaining -= 1; self.next } @@ -450,7 +458,7 @@ self => override def slice(from1: Int, until1: Int): IterableSplitter[T] = newSliceInternal(newTaken(until1), from1) class Mapped[S](f: T => S) extends IterableSplitter[S] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext def next = f(self.next) def remaining = self.remaining @@ -461,7 +469,7 @@ self => override def map[S](f: T => S) = new Mapped(f) class Appended[U >: T, PI <: IterableSplitter[U]](protected val that: PI) extends IterableSplitter[U] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate protected var curr: IterableSplitter[U] = self def hasNext = if (curr.hasNext) true else if (curr eq self) { curr = that @@ -480,7 +488,7 @@ self => def appendParIterable[U >: T, PI <: IterableSplitter[U]](that: PI) = new Appended[U, PI](that) class Zipped[S](protected val that: SeqSplitter[S]) extends IterableSplitter[(T, S)] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext && that.hasNext def next = (self.next, that.next) def remaining = self.remaining min that.remaining @@ -497,7 +505,7 @@ self => class ZippedAll[U >: T, S](protected val that: SeqSplitter[S], protected val thiselem: U, protected val thatelem: S) extends IterableSplitter[(U, S)] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate def hasNext = self.hasNext || that.hasNext def next = if (self.hasNext) { if (that.hasNext) (self.next, that.next) @@ -534,6 +542,18 @@ self => def split: Seq[SeqSplitter[T]] def psplit(sizes: Int*): Seq[SeqSplitter[T]] + override def splitWithSignalling: Seq[SeqSplitter[T]] = { + val pits = split + pits foreach { _.signalDelegate = signalDelegate } + pits + } + + def psplitWithSignalling(sizes: Int*): Seq[SeqSplitter[T]] = { + val pits = psplit(sizes: _*) + pits foreach { _.signalDelegate = signalDelegate } + pits + } + /** The number of elements this iterator has yet to traverse. This method * doesn't change the state of the iterator. 
Unlike the version of this method in the supertrait, * method `remaining` in `ParSeqLike.this.ParIterator` must return an exact number @@ -626,13 +646,13 @@ self => def reverse: SeqSplitter[T] = { val pa = mutable.ParArray.fromTraversables(self).reverse - new pa.ParArrayIterator with pa.SCPI { + new pa.ParArrayIterator { override def reverse = self } } class Patched[U >: T](from: Int, patch: SeqSplitter[U], replaced: Int) extends SeqSplitter[U] { - var signalDelegate = self.signalDelegate + signalDelegate = self.signalDelegate private[this] val trio = { val pits = self.psplit(from, replaced, self.remaining - from - replaced) (pits(0).appendParSeq[U, SeqSplitter[U]](patch)) appendParSeq pits(2) diff --git a/src/library/scala/collection/parallel/immutable/ParHashMap.scala b/src/library/scala/collection/parallel/immutable/ParHashMap.scala index e785932933..7adf51cffb 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashMap.scala @@ -52,7 +52,7 @@ self => protected[this] override def newCombiner = HashMapCombiner[K, V] - def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) with SCPI + def splitter: IterableSplitter[(K, V)] = new ParHashMapIterator(trie.iterator, trie.size) override def seq = trie @@ -69,11 +69,8 @@ self => case None => newc } - type SCPI = SignalContextPassingIterator[ParHashMapIterator] - class ParHashMapIterator(var triter: Iterator[(K, V @uncheckedVariance)], val sz: Int) - extends super.ParIterator { - self: SignalContextPassingIterator[ParHashMapIterator] => + extends IterableSplitter[(K, V)] { var i = 0 def dup = triter match { case t: TrieIterator[_] => @@ -84,24 +81,24 @@ self => dupFromIterator(buff.iterator) } private def dupFromIterator(it: Iterator[(K, V @uncheckedVariance)]) = { - val phit = new ParHashMapIterator(it, sz) with SCPI + val phit = new ParHashMapIterator(it, sz) phit.i = i phit } - def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match { + def split: Seq[IterableSplitter[(K, V)]] = if (remaining < 2) Seq(this) else triter match { case t: TrieIterator[_] => val previousRemaining = remaining val ((fst, fstlength), snd) = t.split val sndlength = previousRemaining - fstlength Seq( - new ParHashMapIterator(fst, fstlength) with SCPI, - new ParHashMapIterator(snd, sndlength) with SCPI + new ParHashMapIterator(fst, fstlength), + new ParHashMapIterator(snd, sndlength) ) case _ => // iterator of the collision map case val buff = triter.toBuffer val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) with SCPI } + Seq(fp, sp) map { b => new ParHashMapIterator(b.iterator, b.length) } } def next(): (K, V) = { i += 1 diff --git a/src/library/scala/collection/parallel/immutable/ParHashSet.scala b/src/library/scala/collection/parallel/immutable/ParHashSet.scala index 8332167b90..1cf0ccd391 100644 --- a/src/library/scala/collection/parallel/immutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/immutable/ParHashSet.scala @@ -49,7 +49,7 @@ self => override def empty: ParHashSet[T] = new ParHashSet[T] - def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) with SCPI + def splitter: IterableSplitter[T] = new ParHashSetIterator(trie.iterator, trie.size) override def seq = trie @@ -66,11 +66,8 @@ self => case None => newc } - type SCPI = SignalContextPassingIterator[ParHashSetIterator] - class ParHashSetIterator(var triter: 
Iterator[T], val sz: Int) - extends super.ParIterator { - self: SignalContextPassingIterator[ParHashSetIterator] => + extends IterableSplitter[T] { var i = 0 def dup = triter match { case t: TrieIterator[_] => @@ -81,24 +78,24 @@ self => dupFromIterator(buff.iterator) } private def dupFromIterator(it: Iterator[T]) = { - val phit = new ParHashSetIterator(it, sz) with SCPI + val phit = new ParHashSetIterator(it, sz) phit.i = i phit } - def split: Seq[ParIterator] = if (remaining < 2) Seq(this) else triter match { + def split: Seq[IterableSplitter[T]] = if (remaining < 2) Seq(this) else triter match { case t: TrieIterator[_] => val previousRemaining = remaining val ((fst, fstlength), snd) = t.split val sndlength = previousRemaining - fstlength Seq( - new ParHashSetIterator(fst, fstlength) with SCPI, - new ParHashSetIterator(snd, sndlength) with SCPI + new ParHashSetIterator(fst, fstlength), + new ParHashSetIterator(snd, sndlength) ) case _ => // iterator of the collision map case val buff = triter.toBuffer val (fp, sp) = buff.splitAt(buff.length / 2) - Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) with SCPI } + Seq(fp, sp) map { b => new ParHashSetIterator(b.iterator, b.length) } } def next(): T = { i += 1 @@ -111,6 +108,7 @@ self => } } + /** $factoryInfo * @define Coll immutable.ParHashSet * @define coll immutable parallel hash set @@ -124,6 +122,7 @@ object ParHashSet extends ParSetFactory[ParHashSet] { def fromTrie[T](t: HashSet[T]) = new ParHashSet(t) } + private[immutable] abstract class HashSetCombiner[T] extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombiner[T]](HashSetCombiner.rootsize) { //self: EnvironmentPassingCombiner[T, ParHashSet[T]] => @@ -207,6 +206,7 @@ extends collection.parallel.BucketCombiner[T, ParHashSet[T], Any, HashSetCombine } } + object HashSetCombiner { def apply[T] = new HashSetCombiner[T] {} // was: with EnvironmentPassingCombiner[T, ParHashSet[T]] {} diff --git a/src/library/scala/collection/parallel/immutable/ParRange.scala b/src/library/scala/collection/parallel/immutable/ParRange.scala index 350e64739f..64e07ce4ff 100644 --- a/src/library/scala/collection/parallel/immutable/ParRange.scala +++ b/src/library/scala/collection/parallel/immutable/ParRange.scala @@ -10,6 +10,7 @@ package scala.collection.parallel.immutable import scala.collection.immutable.Range import scala.collection.parallel.Combiner +import scala.collection.parallel.SeqSplitter import scala.collection.generic.CanCombineFrom import scala.collection.parallel.IterableSplitter import scala.collection.Iterator @@ -41,13 +42,10 @@ self => @inline final def apply(idx: Int) = range.apply(idx); - def splitter = new ParRangeIterator with SCPI - - type SCPI = SignalContextPassingIterator[ParRangeIterator] + def splitter = new ParRangeIterator class ParRangeIterator(range: Range = self.range) - extends ParIterator { - me: SignalContextPassingIterator[ParRangeIterator] => + extends SeqSplitter[Int] { override def toString = "ParRangeIterator(over: " + range + ")" private var ind = 0 private val len = range.length @@ -64,15 +62,15 @@ self => private def rangeleft = range.drop(ind) - def dup = new ParRangeIterator(rangeleft) with SCPI + def dup = new ParRangeIterator(rangeleft) def split = { val rleft = rangeleft val elemleft = rleft.length - if (elemleft < 2) Seq(new ParRangeIterator(rleft) with SCPI) + if (elemleft < 2) Seq(new ParRangeIterator(rleft)) else Seq( - new ParRangeIterator(rleft.take(elemleft / 2)) with SCPI, - new 
ParRangeIterator(rleft.drop(elemleft / 2)) with SCPI + new ParRangeIterator(rleft.take(elemleft / 2)), + new ParRangeIterator(rleft.drop(elemleft / 2)) ) } @@ -81,7 +79,7 @@ self => for (sz <- sizes) yield { val fronttaken = rleft.take(sz) rleft = rleft.drop(sz) - new ParRangeIterator(fronttaken) with SCPI + new ParRangeIterator(fronttaken) } } diff --git a/src/library/scala/collection/parallel/immutable/ParVector.scala b/src/library/scala/collection/parallel/immutable/ParVector.scala index fdeaefc3ff..5d9c431bc1 100644 --- a/src/library/scala/collection/parallel/immutable/ParVector.scala +++ b/src/library/scala/collection/parallel/immutable/ParVector.scala @@ -48,22 +48,19 @@ extends ParSeq[T] def this() = this(Vector()) - type SCPI = SignalContextPassingIterator[ParVectorIterator] - def apply(idx: Int) = vector.apply(idx) def length = vector.length def splitter: SeqSplitter[T] = { - val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) with SCPI + val pit = new ParVectorIterator(vector.startIndex, vector.endIndex) vector.initIterator(pit) pit } override def seq: Vector[T] = vector - class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with ParIterator { - self: SCPI => + class ParVectorIterator(_start: Int, _end: Int) extends VectorIterator[T](_start, _end) with SeqSplitter[T] { def remaining: Int = remainingElementCount def dup: SeqSplitter[T] = (new ParVector(remainingVector)).splitter def split: Seq[ParVectorIterator] = { diff --git a/src/library/scala/collection/parallel/immutable/package.scala b/src/library/scala/collection/parallel/immutable/package.scala index 7b1e39d092..63635537d7 100644 --- a/src/library/scala/collection/parallel/immutable/package.scala +++ b/src/library/scala/collection/parallel/immutable/package.scala @@ -22,23 +22,19 @@ package immutable { override def seq = throw new UnsupportedOperationException def update(idx: Int, elem: T) = throw new UnsupportedOperationException - type SCPI = SignalContextPassingIterator[ParIterator] - - class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends super.ParIterator { - me: SignalContextPassingIterator[ParIterator] => - + class ParIterator(var i: Int = 0, val until: Int = length, elem: T = self.elem) extends SeqSplitter[T] { def remaining = until - i def hasNext = i < until def next = { i += 1; elem } - def dup = new ParIterator(i, until, elem) with SCPI + def dup = new ParIterator(i, until, elem) def psplit(sizes: Int*) = { val incr = sizes.scanLeft(0)(_ + _) - for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) with SCPI + for ((start, end) <- incr.init zip incr.tail) yield new ParIterator(i + start, (i + end) min until, elem) } def split = psplit(remaining / 2, remaining - remaining / 2) } - def splitter = new ParIterator with SCPI + def splitter = new ParIterator } } diff --git a/src/library/scala/collection/parallel/mutable/ParArray.scala b/src/library/scala/collection/parallel/mutable/ParArray.scala index a1eb3beb0c..72a8184b10 100644 --- a/src/library/scala/collection/parallel/mutable/ParArray.scala +++ b/src/library/scala/collection/parallel/mutable/ParArray.scala @@ -19,6 +19,7 @@ import scala.collection.generic.CanBuildFrom import scala.collection.generic.ParFactory import scala.collection.generic.Sizing import scala.collection.parallel.Combiner +import scala.collection.parallel.SeqSplitter import scala.collection.parallel.ParSeqLike import scala.collection.parallel.CHECK_RATE 
import scala.collection.mutable.ArraySeq @@ -74,17 +75,13 @@ self => override def seq = arrayseq - type SCPI = SignalContextPassingIterator[ParArrayIterator] - protected[parallel] def splitter: ParArrayIterator = { - val pit = new ParArrayIterator with SCPI + val pit = new ParArrayIterator pit } class ParArrayIterator(var i: Int = 0, val until: Int = length, val arr: Array[Any] = array) - extends super.ParIterator { - me: SignalContextPassingIterator[ParArrayIterator] => - + extends SeqSplitter[T] { def hasNext = i < until def next = { @@ -95,9 +92,9 @@ self => def remaining = until - i - def dup = new ParArrayIterator(i, until, arr) with SCPI + def dup = new ParArrayIterator(i, until, arr) - def psplit(sizesIncomplete: Int*): Seq[ParIterator] = { + def psplit(sizesIncomplete: Int*): Seq[ParArrayIterator] = { var traversed = i val total = sizesIncomplete.reduceLeft(_ + _) val left = remaining @@ -106,19 +103,19 @@ self => val start = traversed val end = (traversed + sz) min until traversed = end - new ParArrayIterator(start, end, arr) with SCPI + new ParArrayIterator(start, end, arr) } else { - new ParArrayIterator(traversed, traversed, arr) with SCPI + new ParArrayIterator(traversed, traversed, arr) } } - override def split: Seq[ParIterator] = { + override def split: Seq[ParArrayIterator] = { val left = remaining if (left >= 2) { val splitpoint = left / 2 val sq = Seq( - new ParArrayIterator(i, i + splitpoint, arr) with SCPI, - new ParArrayIterator(i + splitpoint, until, arr) with SCPI) + new ParArrayIterator(i, i + splitpoint, arr), + new ParArrayIterator(i + splitpoint, until, arr)) i = until sq } else { diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 31750b0b0d..3b4d3dc0b0 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -56,7 +56,7 @@ self => override def seq = new collection.mutable.HashMap[K, V](hashTableContents) - def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) with SCPI + def splitter = new ParHashMapIterator(1, table.length, size, table(0).asInstanceOf[DefaultEntry[K, V]]) override def size = tableSize @@ -93,14 +93,11 @@ self => override def stringPrefix = "ParHashMap" - type SCPI = SignalContextPassingIterator[ParHashMapIterator] - class ParHashMapIterator(start: Int, untilIdx: Int, totalSize: Int, e: DefaultEntry[K, V]) - extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) with ParIterator { - me: SCPI => + extends EntryIterator[(K, V), ParHashMapIterator](start, untilIdx, totalSize, e) { def entry2item(entry: DefaultEntry[K, V]) = (entry.key, entry.value); def newIterator(idxFrom: Int, idxUntil: Int, totalSz: Int, es: DefaultEntry[K, V]) = - new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) with SCPI + new ParHashMapIterator(idxFrom, idxUntil, totalSz, es) } private def writeObject(out: java.io.ObjectOutputStream) { diff --git a/src/library/scala/collection/parallel/mutable/ParHashSet.scala b/src/library/scala/collection/parallel/mutable/ParHashSet.scala index 7763cdf318..6c5f513ad0 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashSet.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashSet.scala @@ -66,14 +66,11 @@ extends ParSet[T] def contains(elem: T) = containsEntry(elem) - def splitter = new ParHashSetIterator(0, table.length, size) with SCPI - - type SCPI = 
SignalContextPassingIterator[ParHashSetIterator] + def splitter = new ParHashSetIterator(0, table.length, size) class ParHashSetIterator(start: Int, iteratesUntil: Int, totalElements: Int) - extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) with ParIterator { - me: SCPI => - def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) with SCPI + extends ParFlatHashTableIterator(start, iteratesUntil, totalElements) { + def newIterator(start: Int, until: Int, total: Int) = new ParHashSetIterator(start, until, total) } private def writeObject(s: java.io.ObjectOutputStream) { diff --git a/src/library/scala/collection/parallel/mutable/ParHashTable.scala b/src/library/scala/collection/parallel/mutable/ParHashTable.scala index 9b8e233b95..8c93732427 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashTable.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashTable.scala @@ -29,7 +29,7 @@ trait ParHashTable[K, Entry >: Null <: HashEntry[K, Entry]] extends collection.m /** A parallel iterator returning all the entries. */ abstract class EntryIterator[T, +IterRepr <: IterableSplitter[T]] - (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) + (private var idx: Int, private val until: Int, private val totalsize: Int, private var es: Entry) extends IterableSplitter[T] with SizeMapUtils { private val itertable = table private var traversed = 0 diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index f154019bac..cdb9944fdc 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -140,8 +140,9 @@ package parallel { * Automatically forwards the signal delegate when splitting. */ private[parallel] class BufferSplitter[T] - (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, var signalDelegate: collection.generic.Signalling) + (private val buffer: collection.mutable.ArrayBuffer[T], private var index: Int, private val until: Int, _sigdel: collection.generic.Signalling) extends IterableSplitter[T] { + signalDelegate = _sigdel def hasNext = index < until def next = { val r = buffer(index) -- cgit v1.2.3 From f55db64983edfeb9484b7617e2b59f8994c37ef3 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 08:06:24 -0800 Subject: Fix for bad bug with accidental overrides. An object in a subclass would silently override an inherited method, then throw a CCE at runtime. I blamed this on matchesType and altered it accordingly. There's a pretty extensive test case which reflects my expectations. Review by @odersky please. Closes SI-5429. 
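To make the failure mode concrete: the shape of code affected is the one exercised by the new t5429.scala test further down; the snippet here is a distilled illustration (not part of the patch) of a definition that previously compiled without an `override' modifier and, per the commit message, could then fail with a ClassCastException at runtime. After the fix, refchecks reports it as an override.

    // Distilled from test/files/neg/t5429.scala -- illustration only, not part of the patch.
    class A {
      val value = 0              // concrete inherited member of type Int
    }
    class B extends A {
      object value               // before the fix: accepted silently, shadowing A.value;
                                 // after the fix: "object value needs `override' modifier",
                                 // and adding `override' reports an incompatible type
    }
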
--- src/compiler/scala/reflect/internal/Types.scala | 10 ++ .../tools/nsc/transform/OverridingPairs.scala | 10 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 + test/files/neg/t5429.check | 132 +++++++++++++++++++++ test/files/neg/t5429.scala | 93 +++++++++++++++ 5 files changed, 245 insertions(+), 2 deletions(-) create mode 100644 test/files/neg/t5429.check create mode 100644 test/files/neg/t5429.scala diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 371fb8d585..c8b960ebe8 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -5477,6 +5477,8 @@ trait Types extends api.Types { self: SymbolTable => else matchesType(tp1, res2, alwaysMatchSimple) case ExistentialType(_, res2) => alwaysMatchSimple && matchesType(tp1, res2, true) + case TypeRef(_, sym, Nil) => + params1.isEmpty && sym.isModuleClass && matchesType(res1, sym.tpe, alwaysMatchSimple) case _ => false } @@ -5488,6 +5490,8 @@ trait Types extends api.Types { self: SymbolTable => matchesType(res1, res2, alwaysMatchSimple) case ExistentialType(_, res2) => alwaysMatchSimple && matchesType(tp1, res2, true) + case TypeRef(_, sym, Nil) if sym.isModuleClass => + matchesType(res1, sym.tpe, alwaysMatchSimple) case _ => matchesType(res1, tp2, alwaysMatchSimple) } @@ -5508,6 +5512,12 @@ trait Types extends api.Types { self: SymbolTable => if (alwaysMatchSimple) matchesType(res1, tp2, true) else lastTry } + case TypeRef(_, sym, Nil) if sym.isModuleClass => + tp2 match { + case MethodType(Nil, res2) => matchesType(sym.tpe, res2, alwaysMatchSimple) + case NullaryMethodType(res2) => matchesType(sym.tpe, res2, alwaysMatchSimple) + case _ => lastTry + } case _ => lastTry } diff --git a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala index 1200e973c5..e49f8d7c0b 100644 --- a/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala +++ b/src/compiler/scala/tools/nsc/transform/OverridingPairs.scala @@ -45,8 +45,14 @@ abstract class OverridingPairs { * Types always match. Term symbols match if their membertypes * relative to .this do */ - protected def matches(sym1: Symbol, sym2: Symbol): Boolean = - sym1.isType || (self.memberType(sym1) matches self.memberType(sym2)) + protected def matches(sym1: Symbol, sym2: Symbol): Boolean = { + def tp_s(s: Symbol) = self.memberType(s) + "/" + self.memberType(s).getClass + val result = sym1.isType || (self.memberType(sym1) matches self.memberType(sym2)) + debuglog("overriding-pairs? %s matches %s (%s vs. %s) == %s".format( + sym1.fullLocationString, sym2.fullLocationString, tp_s(sym1), tp_s(sym2), result)) + + result + } /** An implementation of BitSets as arrays (maybe consider collection.BitSet * for that?) The main purpose of this is to implement diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a6c2f75d5e..5aa1843188 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -276,6 +276,8 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R * of class `clazz` are met. 
*/ def checkOverride(member: Symbol, other: Symbol) { + debuglog("Checking validity of %s overriding %s".format(member.fullLocationString, other.fullLocationString)) + def memberTp = self.memberType(member) def otherTp = self.memberType(other) def noErrorType = other.tpe != ErrorType && member.tpe != ErrorType diff --git a/test/files/neg/t5429.check b/test/files/neg/t5429.check new file mode 100644 index 0000000000..1b89c59587 --- /dev/null +++ b/test/files/neg/t5429.check @@ -0,0 +1,132 @@ +t5429.scala:20: error: overriding value value in class A of type Int; + object value needs `override' modifier + object value // fail + ^ +t5429.scala:21: error: overriding lazy value lazyvalue in class A of type Int; + object lazyvalue needs `override' modifier + object lazyvalue // fail + ^ +t5429.scala:22: error: overriding method nullary in class A of type => Int; + object nullary needs `override' modifier + object nullary // fail + ^ +t5429.scala:23: error: overriding method emptyArg in class A of type ()Int; + object emptyArg needs `override' modifier + object emptyArg // fail + ^ +t5429.scala:27: error: overriding value value in class A0 of type Any; + object value needs `override' modifier + object value // fail + ^ +t5429.scala:28: error: overriding lazy value lazyvalue in class A0 of type Any; + object lazyvalue needs `override' modifier + object lazyvalue // fail + ^ +t5429.scala:29: error: overriding method nullary in class A0 of type => Any; + object nullary needs `override' modifier + object nullary // fail + ^ +t5429.scala:30: error: overriding method emptyArg in class A0 of type ()Any; + object emptyArg needs `override' modifier + object emptyArg // fail + ^ +t5429.scala:35: error: overriding value value in class A of type Int; + object value has incompatible type + override object value // fail + ^ +t5429.scala:36: error: overriding lazy value lazyvalue in class A of type Int; + object lazyvalue must be declared lazy to override a concrete lazy value + override object lazyvalue // fail + ^ +t5429.scala:37: error: overriding method nullary in class A of type => Int; + object nullary has incompatible type + override object nullary // fail + ^ +t5429.scala:38: error: overriding method emptyArg in class A of type ()Int; + object emptyArg has incompatible type + override object emptyArg // fail + ^ +t5429.scala:39: error: object oneArg overrides nothing + override object oneArg // fail + ^ +t5429.scala:43: error: overriding lazy value lazyvalue in class A0 of type Any; + object lazyvalue must be declared lazy to override a concrete lazy value + override object lazyvalue // !!! 
this fails, but should succeed (lazy over lazy) + ^ +t5429.scala:46: error: object oneArg overrides nothing + override object oneArg // fail + ^ +t5429.scala:50: error: overriding value value in class A of type Int; + value value needs `override' modifier + val value = 0 // fail + ^ +t5429.scala:51: error: overriding lazy value lazyvalue in class A of type Int; + value lazyvalue needs `override' modifier + val lazyvalue = 0 // fail + ^ +t5429.scala:52: error: overriding method nullary in class A of type => Int; + value nullary needs `override' modifier + val nullary = 5 // fail + ^ +t5429.scala:53: error: overriding method emptyArg in class A of type ()Int; + value emptyArg needs `override' modifier + val emptyArg = 10 // fail + ^ +t5429.scala:58: error: overriding lazy value lazyvalue in class A0 of type Any; + value lazyvalue must be declared lazy to override a concrete lazy value + override val lazyvalue = 0 // fail (non-lazy) + ^ +t5429.scala:61: error: value oneArg overrides nothing + override val oneArg = 15 // fail + ^ +t5429.scala:65: error: overriding value value in class A of type Int; + method value needs `override' modifier + def value = 0 // fail + ^ +t5429.scala:66: error: overriding lazy value lazyvalue in class A of type Int; + method lazyvalue needs `override' modifier + def lazyvalue = 2 // fail + ^ +t5429.scala:67: error: overriding method nullary in class A of type => Int; + method nullary needs `override' modifier + def nullary = 5 // fail + ^ +t5429.scala:68: error: overriding method emptyArg in class A of type ()Int; + method emptyArg needs `override' modifier + def emptyArg = 10 // fail + ^ +t5429.scala:72: error: overriding value value in class A0 of type Any; + method value needs to be a stable, immutable value + override def value = 0 // fail + ^ +t5429.scala:73: error: overriding lazy value lazyvalue in class A0 of type Any; + method lazyvalue needs to be a stable, immutable value + override def lazyvalue = 2 // fail + ^ +t5429.scala:76: error: method oneArg overrides nothing + override def oneArg = 15 // fail + ^ +t5429.scala:80: error: overriding value value in class A of type Int; + lazy value value needs `override' modifier + lazy val value = 0 // fail + ^ +t5429.scala:81: error: overriding lazy value lazyvalue in class A of type Int; + lazy value lazyvalue needs `override' modifier + lazy val lazyvalue = 2 // fail + ^ +t5429.scala:82: error: overriding method nullary in class A of type => Int; + lazy value nullary needs `override' modifier + lazy val nullary = 5 // fail + ^ +t5429.scala:83: error: overriding method emptyArg in class A of type ()Int; + lazy value emptyArg needs `override' modifier + lazy val emptyArg = 10 // fail + ^ +t5429.scala:87: error: overriding value value in class A0 of type Any; + lazy value value cannot override a concrete non-lazy value + override lazy val value = 0 // fail (strict over lazy) + ^ +t5429.scala:91: error: value oneArg overrides nothing + override lazy val oneArg = 15 // fail + ^ +34 errors found diff --git a/test/files/neg/t5429.scala b/test/files/neg/t5429.scala new file mode 100644 index 0000000000..1cd4dcd032 --- /dev/null +++ b/test/files/neg/t5429.scala @@ -0,0 +1,93 @@ +// /scala/trac/5429/a.scala +// Wed Feb 1 08:05:27 PST 2012 + +class A { + val value = 0 + lazy val lazyvalue = 2 + def nullary = 5 + def emptyArg() = 10 + def oneArg(x: String) = 15 +} +class A0 { + val value: Any = 0 + lazy val lazyvalue: Any = 2 + def nullary: Any = 5 + def emptyArg(): Any = 10 + def oneArg(x: String): Any = 15 +} + +class B 
extends A { + object value // fail + object lazyvalue // fail + object nullary // fail + object emptyArg // fail + object oneArg // overload +} +class B0 extends A0 { + object value // fail + object lazyvalue // fail + object nullary // fail + object emptyArg // fail + object oneArg // overload +} + +class C extends A { + override object value // fail + override object lazyvalue // fail + override object nullary // fail + override object emptyArg // fail + override object oneArg // fail +} +class C0 extends A0 { + override object value // !!! this succeeds, but should fail (lazy over strict) + override object lazyvalue // !!! this fails, but should succeed (lazy over lazy) + override object nullary // override + override object emptyArg // override + override object oneArg // fail +} + +class D extends A { + val value = 0 // fail + val lazyvalue = 0 // fail + val nullary = 5 // fail + val emptyArg = 10 // fail + val oneArg = 15 // overload +} +class D0 extends A0 { + override val value = 0 // override + override val lazyvalue = 0 // fail (non-lazy) + override val nullary = 5 // override + override val emptyArg = 10 // override + override val oneArg = 15 // fail +} + +class E extends A { + def value = 0 // fail + def lazyvalue = 2 // fail + def nullary = 5 // fail + def emptyArg = 10 // fail + def oneArg = 15 // overload +} +class E0 extends A0 { + override def value = 0 // fail + override def lazyvalue = 2 // fail + override def nullary = 5 // override + override def emptyArg = 10 // override + override def oneArg = 15 // fail +} + +class F extends A { + lazy val value = 0 // fail + lazy val lazyvalue = 2 // fail + lazy val nullary = 5 // fail + lazy val emptyArg = 10 // fail + lazy val oneArg = 15 // overload +} +class F0 extends A0 { + override lazy val value = 0 // fail (strict over lazy) + override lazy val lazyvalue = 2 // override (lazy over lazy) + override lazy val nullary = 5 // override + override lazy val emptyArg = 10 // override + override lazy val oneArg = 15 // fail +} + -- cgit v1.2.3 From b2a21c4eacdddb0ee59a8c74c8a73e6cc34cb6bc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 10:21:22 -0800 Subject: Added getPackage to the repl classloader. 
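For context on the ClassLoader mechanics involved: `getPackage` only returns a non-null `Package` once `definePackage` has been called by the loader (or one of its parents), so a loader that serves classes out of an in-memory directory has to register the enclosing packages itself. A rough sketch of that idea follows; the names here are illustrative only and do not appear in the patch below.

    // Sketch only: register enclosing packages while defining classes from raw bytes,
    // so that later calls to getPackage(...) do not return null.
    class InMemoryClassLoader(parent: ClassLoader) extends ClassLoader(parent) {
      private def enclosingPackages(className: String): List[String] =
        (className split '.').inits.toList drop 1 dropRight 1 map (_ mkString ".") reverse

      def defineFromBytes(name: String, bytes: Array[Byte]): Class[_] = {
        for (p <- enclosingPackages(name) if getPackage(p) == null)
          definePackage(p, "", "", "", "", "", "", null)
        defineClass(name, bytes, 0, bytes.length)
      }
    }
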
--- .../nsc/interpreter/AbstractFileClassLoader.scala | 4 ++++ .../scala/tools/nsc/interpreter/IMain.scala | 26 ++++++++++++++++++++++ 2 files changed, 30 insertions(+) diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 3bc4e1cbe1..70fa740eeb 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -20,6 +20,10 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) with ScalaClassLoader { // private val defined = mutable.Map[String, Class[_]]() + + // Widening to public + override def getPackage(name: String) = super.getPackage(name) + override protected def trace = sys.props contains "scala.debug.classloader" diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 56bb72ca6f..4ccea8afd6 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -314,6 +314,26 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) { private[IMain] var traceClassLoading = isReplTrace override protected def trace = super.trace || traceClassLoading + + private val packages = mutable.HashMap[String, Package]() + private def enclosingPackageNames(name: String): List[String] = + (name split '.').inits.toList drop 1 dropRight 1 map (_ mkString ".") reverse + + // Here's what all those params to definePackage are after the package name: + // + // specTitle - The specification title + // specVersion - The specification version + // specVendor - The specification vendor + // implTitle - The implementation title + // implVersion - The implementation version + // implVendor - The implementation vendor + // sealBase - If not null, then this package is sealed with respect to the given code source URL object. Otherwise, the package is not sealed. + private def addPackageNames(name: String) { + enclosingPackageNames(name) filterNot (packages contains _) foreach { p => + packages(p) = definePackage(p, "", "", "", "", "", "", null) + repltrace("Added " + packages(p) + " to repl classloader.") + } + } /** Overridden here to try translating a simple name to the generated * class name if the original attempt fails. This method is used by @@ -328,6 +348,12 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends file } } + override def findClass(name: String): JClass = { + val clazz = super.findClass(name) + if (clazz ne null) + addPackageNames(clazz.getName) + clazz + } } private def makeClassLoader(): AbstractFileClassLoader = new TranslatingClassLoader(parentClassLoader match { -- cgit v1.2.3 From c05b850cf5655943e861e0b898b253e83e0e094b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 10:36:06 -0800 Subject: Fix for slice boundary condition. Negative "to" index should be normalized to 0 before using it in a difference operation. 
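One way to read the boundary condition (my arithmetic, derived from the hunk and test below rather than stated in the commit): when `until` is a large negative number and `from` a large positive one, the old bound `hi = min(until, length)` stays hugely negative, so the difference `hi - lo` can underflow Int and wrap around to a large positive element count handed to `sizeHint`. Normalizing `until` to at least 0 first keeps the difference in range.

    // The two bound computations side by side, using the values from
    // test/files/run/buffer-slice.scala (illustration, not part of the patch).
    val from   = 102450392
    val until  = -2045033354
    val length = 0

    val lo       = math.max(from, 0)                    // 102450392
    val hiOld    = math.min(until, length)              // -2045033354
    val elemsOld = math.max(hiOld - lo, 0)              // hiOld - lo underflows Int and wraps
                                                        // to 2147483550, a bogus size hint

    val hiNew    = math.min(math.max(until, 0), length) // 0
    val elemsNew = math.max(hiNew - lo, 0)              // 0 -- the empty slice we expect
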
--- src/library/scala/collection/IndexedSeqOptimized.scala | 2 +- test/files/run/buffer-slice.check | 1 + test/files/run/buffer-slice.scala | 5 +++++ 3 files changed, 7 insertions(+), 1 deletion(-) create mode 100644 test/files/run/buffer-slice.check create mode 100644 test/files/run/buffer-slice.scala diff --git a/src/library/scala/collection/IndexedSeqOptimized.scala b/src/library/scala/collection/IndexedSeqOptimized.scala index e2541f2a66..196e77c91b 100755 --- a/src/library/scala/collection/IndexedSeqOptimized.scala +++ b/src/library/scala/collection/IndexedSeqOptimized.scala @@ -104,7 +104,7 @@ trait IndexedSeqOptimized[+A, +Repr] extends IndexedSeqLike[A, Repr] { self => override /*IterableLike*/ def slice(from: Int, until: Int): Repr = { val lo = math.max(from, 0) - val hi = math.min(until, length) + val hi = math.min(math.max(until, 0), length) val elems = math.max(hi - lo, 0) val b = newBuilder b.sizeHint(elems) diff --git a/test/files/run/buffer-slice.check b/test/files/run/buffer-slice.check new file mode 100644 index 0000000000..5287aa9d7b --- /dev/null +++ b/test/files/run/buffer-slice.check @@ -0,0 +1 @@ +ArrayBuffer() diff --git a/test/files/run/buffer-slice.scala b/test/files/run/buffer-slice.scala new file mode 100644 index 0000000000..ddd82e0751 --- /dev/null +++ b/test/files/run/buffer-slice.scala @@ -0,0 +1,5 @@ +object Test { + def main(args: Array[String]): Unit = { + println(scala.collection.mutable.ArrayBuffer().slice(102450392, -2045033354)) + } +} -- cgit v1.2.3 From 5fe2d8b109abf3ff3e2d82dd4f248200846795c3 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 1 Feb 2012 19:54:50 +0100 Subject: Add the Ctrie concurrent map implementation. Ctrie is a scalable concurrent map implementation that supports constant time lock-free lazy snapshots. Due to the well-known private volatile field problem, atomic reference updaters cannot be used efficiently in Scala yet. For this reason, 4 java files had to be included as well. None of these pollute the namespace, as most of the classes are private. Unit tests and a scalacheck check is also included. 
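Before diving into the ~900-line implementation, a small usage sketch may help. It relies only on the public operations visible in Ctrie.scala below (the usual ConcurrentMap methods plus `snapshot` and `readOnlySnapshot`) and is illustrative rather than part of the patch.

    import scala.collection.mutable.Ctrie

    object CtrieDemo {
      def main(args: Array[String]) {
        val ct = new Ctrie[Int, String]
        ct.update(1, "one")
        ct += ((2, "two"))
        ct.putIfAbsent(2, "TWO")              // returns Some("two"); the existing binding is kept

        // Constant-time, lock-free snapshot: later updates to `ct` are not visible in it.
        val snap = ct.readOnlySnapshot()
        ct.remove(1)

        println(ct.get(1))                    // None
        println(snap.get(1))                  // Some(one) -- the snapshot is unaffected
        println(snap.iterator.toList)         // iterates the snapshot's quiescent state
      }
    }
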
--- .../scala/collection/mutable/BasicNode.java | 20 + src/library/scala/collection/mutable/Ctrie.scala | 906 +++++++++++++++++++++ src/library/scala/collection/mutable/Gen.java | 18 + .../scala/collection/mutable/INodeBase.java | 35 + src/library/scala/collection/mutable/MainNode.java | 36 + test/files/run/ctries/DumbHash.scala | 14 + test/files/run/ctries/Wrap.scala | 9 + test/files/run/ctries/concmap.scala | 169 ++++ test/files/run/ctries/iterator.scala | 279 +++++++ test/files/run/ctries/lnode.scala | 58 ++ test/files/run/ctries/main.scala | 45 + test/files/run/ctries/snapshot.scala | 267 ++++++ test/files/scalacheck/Ctrie.scala | 199 +++++ 13 files changed, 2055 insertions(+) create mode 100644 src/library/scala/collection/mutable/BasicNode.java create mode 100644 src/library/scala/collection/mutable/Ctrie.scala create mode 100644 src/library/scala/collection/mutable/Gen.java create mode 100644 src/library/scala/collection/mutable/INodeBase.java create mode 100644 src/library/scala/collection/mutable/MainNode.java create mode 100644 test/files/run/ctries/DumbHash.scala create mode 100644 test/files/run/ctries/Wrap.scala create mode 100644 test/files/run/ctries/concmap.scala create mode 100644 test/files/run/ctries/iterator.scala create mode 100644 test/files/run/ctries/lnode.scala create mode 100644 test/files/run/ctries/main.scala create mode 100644 test/files/run/ctries/snapshot.scala create mode 100644 test/files/scalacheck/Ctrie.scala diff --git a/src/library/scala/collection/mutable/BasicNode.java b/src/library/scala/collection/mutable/BasicNode.java new file mode 100644 index 0000000000..b934aed24f --- /dev/null +++ b/src/library/scala/collection/mutable/BasicNode.java @@ -0,0 +1,20 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + + + + +abstract class BasicNode { + + public abstract String string(int lev); + +} \ No newline at end of file diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala new file mode 100644 index 0000000000..d02e0ce178 --- /dev/null +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -0,0 +1,906 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable + + + +import java.util.concurrent.atomic._ +import collection.immutable.{ ListMap => ImmutableListMap } +import annotation.tailrec +import annotation.switch + + + +private[mutable] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { + import INodeBase._ + + WRITE(bn) + + def this(g: Gen) = this(null, g) + + @inline final def WRITE(nval: MainNode[K, V]) = INodeBase.updater.set(this, nval) + + @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + + @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = { + val m = /*READ*/mainnode + val prevval = /*READ*/m.prev + if (prevval eq null) m + else GCAS_Complete(m, ct) + } + + @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else { + // complete the GCAS + val prev = /*READ*/m.prev + val ctr = ct.RDCSS_READ_ROOT(true) + + prev match { + case null 
=> + m + case fn: FailedNode[_, _] => // try to commit to previous value + if (CAS(m, fn.prev)) fn.prev + else GCAS_Complete(/*READ*/mainnode, ct) + case vn: MainNode[_, _] => + // Assume that you've read the root from the generation G. + // Assume that the snapshot algorithm is correct. + // ==> you can only reach nodes in generations <= G. + // ==> `gen` is <= G. + // We know that `ctr.gen` is >= G. + // ==> if `ctr.gen` = `gen` then they are both equal to G. + // ==> otherwise, we know that either `ctr.gen` > G, `gen` < G, + // or both + if ((ctr.gen eq gen) && ct.nonReadOnly) { + // try to commit + if (m.CAS_PREV(prev, null)) m + else GCAS_Complete(m, ct) + } else { + // try to abort + m.CAS_PREV(prev, new FailedNode(prev)) + GCAS_Complete(/*READ*/mainnode, ct) + } + } + } + + @inline final def GCAS(old: MainNode[K, V], n: MainNode[K, V], ct: Ctrie[K, V]): Boolean = { + n.WRITE_PREV(old) + if (CAS(old, n)) { + GCAS_Complete(n, ct) + /*READ*/n.prev eq null + } else false + } + + @inline private def inode(cn: MainNode[K, V]) = { + val nin = new INode[K, V](gen) + nin.WRITE(cn) + nin + } + + @inline final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = { + val nin = new INode[K, V](ngen) + val main = GCAS_READ(ct) + nin.WRITE(main) + nin + } + + /** Inserts a key value pair, overwriting the old pair if the keys match. + * + * @return true if successful, false otherwise + */ + @tailrec final def rec_insert(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Boolean = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_insert(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insert(k, v, hc, lev, parent, startgen, ct) + else false + } + case sn: SNode[K, V] => + if (sn.hc == hc && sn.k == k) GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + GCAS(cn, nn, ct) + } + } + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + GCAS(cn, ncnode, ct) + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + false + case ln: LNode[K, V] => // 3) an l-node + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + } + + /** Inserts a new key value pair, given that a specific condition is met. + * + * @param cond null - don't care if the key was there; KEY_ABSENT - key wasn't there; KEY_PRESENT - key was there; other value `v` - key must be bound to `v` + * @return null if unsuccessful, Option[V] otherwise (indicating previous value bound to the key) + */ + @tailrec final def rec_insertif(k: K, v: V, hc: Int, cond: AnyRef, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multiway node + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + val mask = flag - 1 + val pos = Integer.bitCount(bmp & mask) + if ((bmp & flag) != 0) { + // 1a) insert below + cn.array(pos) match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_insertif(k, v, hc, cond, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_insertif(k, v, hc, cond, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] => cond match { + case null => + if (sn.hc == hc && sn.k == k) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_ABSENT => + if (sn.hc == hc && sn.k == k) Some(sn.v) + else { + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val nn = rn.updatedAt(pos, inode(CNode.dual(sn, sn.hc, new SNode(k, v, hc), hc, lev + 5, gen)), gen) + if (GCAS(cn, nn, ct)) None + else null + } + case INode.KEY_PRESENT => + if (sn.hc == hc && sn.k == k) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + case otherv: V => + if (sn.hc == hc && sn.k == k && sn.v == otherv) { + if (GCAS(cn, cn.updatedAt(pos, new SNode(k, v, hc), gen), ct)) Some(sn.v) else null + } else None + } + } + } else cond match { + case null | INode.KEY_ABSENT => + val rn = if (cn.gen eq gen) cn else cn.renewed(gen, ct) + val ncnode = rn.insertedAt(pos, flag, new SNode(k, v, hc), gen) + if (GCAS(cn, ncnode, ct)) None else null + case INode.KEY_PRESENT => None + case otherv: V => None + } + case sn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => // 3) an l-node + @inline def insertln() = { + val nn = ln.inserted(k, v) + GCAS(ln, nn, ct) + } + cond match { + case null => + val optv = ln.get(k) + if (insertln()) optv else null + case INode.KEY_ABSENT => + ln.get(k) match { + case None => if (insertln()) None else null + case optv => optv + } + case INode.KEY_PRESENT => + ln.get(k) match { + case Some(v0) => if (insertln()) Some(v0) else null + case None => None + } + case otherv: V => + ln.get(k) match { + case Some(v0) if v0 == otherv => if (insertln()) Some(otherv) else null + case _ => None + } + } + } + } + + /** Looks up the value associated with the key. + * + * @return null if no value has been found, RESTART if the operation wasn't successful, or any other value otherwise + */ + @tailrec final def rec_lookup(k: K, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): AnyRef = { + val m = GCAS_READ(ct) // use -Yinline! 
+ + m match { + case cn: CNode[K, V] => // 1) a multinode + val idx = (hc >>> lev) & 0x1f + val flag = 1 << idx + val bmp = cn.bitmap + if ((bmp & flag) == 0) null // 1a) bitmap shows no binding + else { // 1b) bitmap contains a value - descend + val pos = if (bmp == 0xffffffff) idx else Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + sub match { + case in: INode[K, V] => + if (ct.isReadOnly || (startgen eq in.gen)) in.rec_lookup(k, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_lookup(k, hc, lev, parent, startgen, ct) + else return RESTART // used to be throw RestartException + } + case sn: SNode[K, V] => // 2) singleton node + if (sn.hc == hc && sn.k == k) sn.v.asInstanceOf[AnyRef] + else null + } + } + case tn: TNode[K, V] => // 3) non-live node + def cleanReadOnly(tn: TNode[K, V]) = if (ct.nonReadOnly) { + clean(parent, ct, lev - 5) + RESTART // used to be throw RestartException + } else { + if (tn.hc == hc && tn.k == k) tn.v.asInstanceOf[AnyRef] + else null + } + cleanReadOnly(tn) + case ln: LNode[K, V] => // 5) an l-node + ln.get(k).asInstanceOf[Option[AnyRef]].orNull + } + } + + /** Removes the key associated with the given value. + * + * @param v if null, will remove the key irregardless of the value; otherwise removes only if binding contains that exact key and value + * @return null if not successful, an Option[V] indicating the previous value otherwise + */ + final def rec_remove(k: K, v: V, hc: Int, lev: Int, parent: INode[K, V], startgen: Gen, ct: Ctrie[K, V]): Option[V] = { + val m = GCAS_READ(ct) // use -Yinline! + + m match { + case cn: CNode[K, V] => + val idx = (hc >>> lev) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) None + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + val res = sub match { + case in: INode[K, V] => + if (startgen eq in.gen) in.rec_remove(k, v, hc, lev + 5, this, startgen, ct) + else { + if (GCAS(cn, cn.renewed(startgen, ct), ct)) rec_remove(k, v, hc, lev, parent, startgen, ct) + else null + } + case sn: SNode[K, V] => + if (sn.hc == hc && sn.k == k && (v == null || sn.v == v)) { + val ncn = cn.removedAt(pos, flag, gen).toContracted(lev) + if (GCAS(cn, ncn, ct)) Some(sn.v) else null + } else None + } + + if (res == None || (res eq null)) res + else { + @tailrec def cleanParent(nonlive: AnyRef) { + val pm = parent.GCAS_READ(ct) + pm match { + case cn: CNode[K, V] => + val idx = (hc >>> (lev - 5)) & 0x1f + val bmp = cn.bitmap + val flag = 1 << idx + if ((bmp & flag) == 0) {} // somebody already removed this i-node, we're done + else { + val pos = Integer.bitCount(bmp & (flag - 1)) + val sub = cn.array(pos) + if (sub eq this) nonlive match { + case tn: TNode[K, V] => + val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) + if (!parent.GCAS(cn, ncn, ct)) + if (ct.RDCSS_READ_ROOT().gen == startgen) cleanParent(nonlive) + } + } + case _ => // parent is no longer a cnode, we're done + } + } + + if (parent ne null) { // never tomb at root + val n = GCAS_READ(ct) + if (n.isInstanceOf[TNode[_, _]]) + cleanParent(n) + } + + res + } + } + case tn: TNode[K, V] => + clean(parent, ct, lev - 5) + null + case ln: LNode[K, V] => + if (v == null) { + val optv = ln.get(k) + val nn = ln.removed(k) + if (GCAS(ln, nn, ct)) optv else null + } else ln.get(k) match { + case optv @ Some(v0) if v0 == v => + val nn = ln.removed(k) + if (GCAS(ln, nn, ct)) optv else null + case _ => None + } + } + } + + private def clean(nd: INode[K, V], ct: 
Ctrie[K, V], lev: Int) { + val m = nd.GCAS_READ(ct) + m match { + case cn: CNode[K, V] => nd.GCAS(cn, cn.toCompressed(ct, lev, gen), ct) + case _ => + } + } + + final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null + + /* this is a quiescent method! */ + def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { + case null => "" + case tn: TNode[_, _] => "TNode(%s, %s, %d, !)".format(tn.k, tn.v, tn.hc) + case cn: CNode[_, _] => cn.string(lev) + case ln: LNode[_, _] => ln.string(lev) + case x => "".format(x) + }) + +} + + +private[mutable] object INode { + val KEY_PRESENT = new AnyRef + val KEY_ABSENT = new AnyRef + + def newRootNode[K, V] = { + val gen = new Gen + val cn = new CNode[K, V](0, new Array(0), gen) + new INode[K, V](cn, gen) + } +} + + +private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNode[K, V] { + WRITE_PREV(p) + + def string(lev: Int) = throw new UnsupportedOperationException + + override def toString = "FailedNode(%s)".format(p) +} + + +private[mutable] trait KVNode[K, V] { + def kvPair: (K, V) +} + + +private[mutable] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) +extends BasicNode with KVNode[K, V] { + final def copy = new SNode(k, v, hc) + final def copyTombed = new TNode(k, v, hc) + final def copyUntombed = new SNode(k, v, hc) + final def kvPair = (k, v) + final def string(lev: Int) = (" " * lev) + "SNode(%s, %s, %x)".format(k, v, hc) +} + + +private[mutable] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) +extends MainNode[K, V] with KVNode[K, V] { + final def copy = new TNode(k, v, hc) + final def copyTombed = new TNode(k, v, hc) + final def copyUntombed = new SNode(k, v, hc) + final def kvPair = (k, v) + final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) +} + + +private[mutable] final class LNode[K, V](final val listmap: ImmutableListMap[K, V]) +extends MainNode[K, V] { + def this(k: K, v: V) = this(ImmutableListMap(k -> v)) + def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2)) + def inserted(k: K, v: V) = new LNode(listmap + ((k, v))) + def removed(k: K): MainNode[K, V] = { + val updmap = listmap - k + if (updmap.size > 1) new LNode(updmap) + else { + val (k, v) = updmap.iterator.next + new TNode(k, v, k.hashCode) // create it tombed so that it gets compressed on subsequent accesses + } + } + def get(k: K) = listmap.get(k) + def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", ")) +} + + +private[mutable] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen) +extends MainNode[K, V] { + + final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val narr = new Array[BasicNode](len) + Array.copy(array, 0, narr, 0, len) + narr(pos) = nn + new CNode[K, V](bitmap, narr, gen) + } + + final def removedAt(pos: Int, flag: Int, gen: Gen) = { + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len - 1) + Array.copy(arr, 0, narr, 0, pos) + Array.copy(arr, pos + 1, narr, pos, len - pos - 1) + new CNode[K, V](bitmap ^ flag, narr, gen) + } + + final def insertedAt(pos: Int, flag: Int, nn: BasicNode, gen: Gen) = { + val len = array.length + val bmp = bitmap + val narr = new Array[BasicNode](len + 1) + Array.copy(array, 0, narr, 0, pos) + narr(pos) = nn + Array.copy(array, pos, narr, pos + 1, len - pos) + new CNode[K, V](bmp | flag, narr, gen) + } + + /** Returns a copy of this cnode such that 
all the i-nodes below it are copied + * to the specified generation `ngen`. + */ + final def renewed(ngen: Gen, ct: Ctrie[K, V]) = { + var i = 0 + val arr = array + val len = arr.length + val narr = new Array[BasicNode](len) + while (i < len) { + arr(i) match { + case in: INode[K, V] => narr(i) = in.copyToGen(ngen, ct) + case bn: BasicNode => narr(i) = bn + } + i += 1 + } + new CNode[K, V](bitmap, narr, ngen) + } + + private def resurrect(inode: INode[K, V], inodemain: AnyRef): BasicNode = inodemain match { + case tn: TNode[_, _] => tn.copyUntombed + case _ => inode + } + + final def toContracted(lev: Int): MainNode[K, V] = if (array.length == 1 && lev > 0) array(0) match { + case sn: SNode[K, V] => sn.copyTombed + case _ => this + } else this + + // - if the branching factor is 1 for this CNode, and the child + // is a tombed SNode, returns its tombed version + // - otherwise, if there is at least one non-null node below, + // returns the version of this node with at least some null-inodes + // removed (those existing when the op began) + // - if there are only null-i-nodes below, returns null + final def toCompressed(ct: Ctrie[K, V], lev: Int, gen: Gen) = { + var bmp = bitmap + var i = 0 + val arr = array + val tmparray = new Array[BasicNode](arr.length) + while (i < arr.length) { // construct new bitmap + val sub = arr(i) + sub match { + case in: INode[K, V] => + val inodemain = in.GCAS_READ(ct) + assert(inodemain ne null) + tmparray(i) = resurrect(in, inodemain) + case sn: SNode[K, V] => + tmparray(i) = sn + } + i += 1 + } + + new CNode[K, V](bmp, tmparray, gen).toContracted(lev) + } + + private[mutable] def string(lev: Int): String = "CNode %x\n%s".format(bitmap, array.map(_.string(lev + 1)).mkString("\n")) + + /* quiescently consistent - don't call concurrently to anything involving a GCAS!! 
*/ + protected def collectElems: Seq[(K, V)] = array flatMap { + case sn: SNode[K, V] => Some(sn.kvPair) + case in: INode[K, V] => in.mainnode match { + case tn: TNode[K, V] => Some(tn.kvPair) + case ln: LNode[K, V] => ln.listmap.toList + case cn: CNode[K, V] => cn.collectElems + } + } + + protected def collectLocalElems: Seq[String] = array flatMap { + case sn: SNode[K, V] => Some(sn.kvPair._2.toString) + case in: INode[K, V] => Some(in.toString.drop(14) + "(" + in.gen + ")") + } + + override def toString = { + val elems = collectLocalElems + "CNode(sz: %d; %s)".format(elems.size, elems.sorted.mkString(", ")) + } +} + + +private[mutable] object CNode { + + def dual[K, V](x: SNode[K, V], xhc: Int, y: SNode[K, V], yhc: Int, lev: Int, gen: Gen): MainNode[K, V] = if (lev < 35) { + val xidx = (xhc >>> lev) & 0x1f + val yidx = (yhc >>> lev) & 0x1f + val bmp = (1 << xidx) | (1 << yidx) + if (xidx == yidx) { + val subinode = new INode[K, V](gen)//(Ctrie.inodeupdater) + subinode.mainnode = dual(x, xhc, y, yhc, lev + 5, gen) + new CNode(bmp, Array(subinode), gen) + } else { + if (xidx < yidx) new CNode(bmp, Array(x, y), gen) + else new CNode(bmp, Array(y, x), gen) + } + } else { + new LNode(x.k, x.v, y.k, y.v) + } + +} + + +private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]) { + @volatile var committed = false +} + + +class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef]) +extends ConcurrentMap[K, V] +{ + private val rootupdater = rtupd + @volatile var root = r + + def this() = this( + INode.newRootNode, + AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root") + ) + + /* internal methods */ + + @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + + @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { + val r = /*READ*/root + r match { + case in: INode[K, V] => in + case desc: RDCSS_Descriptor[K, V] => RDCSS_Complete(abort) + } + } + + @tailrec private def RDCSS_Complete(abort: Boolean): INode[K, V] = { + val v = /*READ*/root + v match { + case in: INode[K, V] => in + case desc: RDCSS_Descriptor[K, V] => + val RDCSS_Descriptor(ov, exp, nv) = desc + if (abort) { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } else { + val oldmain = ov.GCAS_READ(this) + if (oldmain eq exp) { + if (CAS_ROOT(desc, nv)) { + desc.committed = true + nv + } else RDCSS_Complete(abort) + } else { + if (CAS_ROOT(desc, ov)) ov + else RDCSS_Complete(abort) + } + } + } + } + + private def RDCSS_ROOT(ov: INode[K, V], expectedmain: MainNode[K, V], nv: INode[K, V]): Boolean = { + val desc = RDCSS_Descriptor(ov, expectedmain, nv) + if (CAS_ROOT(ov, desc)) { + RDCSS_Complete(false) + /*READ*/desc.committed + } else false + } + + @inline private def computeHash(k: K): Int = { + k.hashCode + } + + @tailrec private def inserthc(k: K, hc: Int, v: V) { + val r = RDCSS_READ_ROOT() + if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) + } + + @tailrec private def insertifhc(k: K, hc: Int, v: V, cond: AnyRef): Option[V] = { + val r = RDCSS_READ_ROOT() + + val ret = r.rec_insertif(k, v, hc, cond, 0, null, r.gen, this) + if (ret eq null) insertifhc(k, hc, v, cond) + else ret + } + + @tailrec private def lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + val res = r.rec_lookup(k, hc, 0, null, r.gen, this) + if (res eq INodeBase.RESTART) lookuphc(k, hc) + else res + } + + /* + //@tailrec + private def 
lookuphc(k: K, hc: Int): AnyRef = { + val r = RDCSS_READ_ROOT() + try { + r.rec_lookup(k, hc, 0, null, r.gen, this) + } catch { + case RestartException => + lookuphc(k, hc) + } + } + */ + + @tailrec private def removehc(k: K, v: V, hc: Int): Option[V] = { + val r = RDCSS_READ_ROOT() + val res = r.rec_remove(k, v, hc, 0, null, r.gen, this) + if (res ne null) res + else removehc(k, v, hc) + } + + def string = RDCSS_READ_ROOT().string(0) + + /* public methods */ + + @inline final def isReadOnly = rootupdater eq null + + @inline final def nonReadOnly = rootupdater ne null + + @tailrec final def snapshot(): Ctrie[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.GCAS_READ(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater) + else snapshot() + } + + @tailrec final def readOnlySnapshot(): collection.Map[K, V] = { + val r = RDCSS_READ_ROOT() + val expmain = r.GCAS_READ(this) + if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null) + else readOnlySnapshot() + } + + @tailrec final override def clear() { + val r = RDCSS_READ_ROOT() + if (!RDCSS_ROOT(r, r.GCAS_READ(this), INode.newRootNode[K, V])) clear() + } + + final def lookup(k: K): V = { + val hc = computeHash(k) + lookuphc(k, hc).asInstanceOf[V] + } + + final override def apply(k: K): V = { + val hc = computeHash(k) + val res = lookuphc(k, hc) + if (res eq null) throw new NoSuchElementException + else res.asInstanceOf[V] + } + + final def get(k: K): Option[V] = { + val hc = computeHash(k) + Option(lookuphc(k, hc)).asInstanceOf[Option[V]] + } + + override def put(key: K, value: V): Option[V] = { + val hc = computeHash(key) + insertifhc(key, hc, value, null) + } + + final override def update(k: K, v: V) { + val hc = computeHash(k) + inserthc(k, hc, v) + } + + final def +=(kv: (K, V)) = { + update(kv._1, kv._2) + this + } + + final override def remove(k: K): Option[V] = { + val hc = computeHash(k) + removehc(k, null.asInstanceOf[V], hc) + } + + final def -=(k: K) = { + remove(k) + this + } + + def putIfAbsent(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_ABSENT) + } + + def remove(k: K, v: V): Boolean = { + val hc = computeHash(k) + removehc(k, v, hc).nonEmpty + } + + def replace(k: K, oldvalue: V, newvalue: V): Boolean = { + val hc = computeHash(k) + insertifhc(k, hc, newvalue, oldvalue.asInstanceOf[AnyRef]).nonEmpty + } + + def replace(k: K, v: V): Option[V] = { + val hc = computeHash(k) + insertifhc(k, hc, v, INode.KEY_PRESENT) + } + + def iterator: Iterator[(K, V)] = + if (nonReadOnly) readOnlySnapshot().iterator + else new CtrieIterator(this) + +} + + +object Ctrie { + val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[AnyRef], "mainnode") +} + + +private[mutable] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { + var stack = new Array[Array[BasicNode]](7) + var stackpos = new Array[Int](7) + var depth = -1 + var subiter: Iterator[(K, V)] = null + var current: KVNode[K, V] = null + + if (mustInit) initialize() + + def hasNext = (current ne null) || (subiter ne null) + + def next() = if (hasNext) { + var r: (K, V) = null + if (subiter ne null) { + r = subiter.next() + checkSubiter() + } else { + r = current.kvPair + advance() + } + r + } else Iterator.empty.next() + + private def readin(in: INode[K, V]) = in.GCAS_READ(ct) match { + case cn: CNode[K, V] => + depth += 1 + stack(depth) = cn.array + stackpos(depth) = -1 + advance() + 
case tn: TNode[K, V] => + current = tn + case ln: LNode[K, V] => + subiter = ln.listmap.iterator + checkSubiter() + case null => + current = null + } + + @inline private def checkSubiter() = if (!subiter.hasNext) { + subiter = null + advance() + } + + @inline private def initialize() { + assert(ct.isReadOnly) + + val r = ct.RDCSS_READ_ROOT() + readin(r) + } + + def advance(): Unit = if (depth >= 0) { + val npos = stackpos(depth) + 1 + if (npos < stack(depth).length) { + stackpos(depth) = npos + stack(depth)(npos) match { + case sn: SNode[K, V] => + current = sn + case in: INode[K, V] => + readin(in) + } + } else { + depth -= 1 + advance() + } + } else current = null + + /** Returns a sequence of iterators over subsets of this iterator. + * It's used to ease the implementation of splitters for a parallel version of the Ctrie. + */ + protected def subdivide: Seq[Iterator[(K, V)]] = if (subiter ne null) { + // the case where an LNode is being iterated + val it = subiter + subiter = null + advance() + Seq(it, this) + } else if (depth == -1) Seq(this) else { + var d = 0 + while (d <= depth) { + val rem = stack(d).length - 1 - stackpos(d) + if (rem > 0) { + val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) + stack(d) = arr1 + stackpos(d) = -1 + val it = new CtrieIterator[K, V](ct, false) + it.stack(0) = arr2 + it.stackpos(0) = -1 + it.depth = 0 + it.advance() // <-- fix it + return Seq(this, it) + } + d += 1 + } + Seq(this) + } + + private def print { + println("ctrie iterator") + println(stackpos.mkString(",")) + println("depth: " + depth) + println("curr.: " + current) + println(stack.mkString("\n")) + } + +} + + +private[mutable] object RestartException extends util.control.ControlThrowable + + +private[mutable] object Debug { + import collection._ + + lazy val logbuffer = new java.util.concurrent.ConcurrentLinkedQueue[AnyRef] + + def log(s: AnyRef) = logbuffer.add(s) + + def flush() { + for (s <- JavaConversions.asScalaIterator(logbuffer.iterator())) Console.out.println(s.toString) + logbuffer.clear() + } + + def clear() { + logbuffer.clear() + } + +} + + + + + + + + + + diff --git a/src/library/scala/collection/mutable/Gen.java b/src/library/scala/collection/mutable/Gen.java new file mode 100644 index 0000000000..0c9a30d198 --- /dev/null +++ b/src/library/scala/collection/mutable/Gen.java @@ -0,0 +1,18 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + + + + +final class Gen { +} + diff --git a/src/library/scala/collection/mutable/INodeBase.java b/src/library/scala/collection/mutable/INodeBase.java new file mode 100644 index 0000000000..487b5cfc28 --- /dev/null +++ b/src/library/scala/collection/mutable/INodeBase.java @@ -0,0 +1,35 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + + + +abstract class INodeBase extends BasicNode { + + public static final AtomicReferenceFieldUpdater updater = AtomicReferenceFieldUpdater.newUpdater(INodeBase.class, MainNode.class, "mainnode"); + + public static final Object RESTART = new Object(); + + public volatile MainNode mainnode = null; + + public final Gen 
gen; + + public INodeBase(Gen generation) { + gen = generation; + } + + public BasicNode prev() { + return null; + } + +} \ No newline at end of file diff --git a/src/library/scala/collection/mutable/MainNode.java b/src/library/scala/collection/mutable/MainNode.java new file mode 100644 index 0000000000..09bc858edc --- /dev/null +++ b/src/library/scala/collection/mutable/MainNode.java @@ -0,0 +1,36 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + +import java.util.concurrent.atomic.AtomicReferenceFieldUpdater; + + + +abstract class MainNode extends BasicNode { + + public static final AtomicReferenceFieldUpdater updater = AtomicReferenceFieldUpdater.newUpdater(MainNode.class, MainNode.class, "prev"); + + public volatile MainNode prev = null; + + public boolean CAS_PREV(MainNode oldval, MainNode nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_PREV(MainNode nval) { + updater.set(this, nval); + } + + // do we need this? unclear in the javadocs... + public MainNode READ_PREV() { + return updater.get(this); + } + +} \ No newline at end of file diff --git a/test/files/run/ctries/DumbHash.scala b/test/files/run/ctries/DumbHash.scala new file mode 100644 index 0000000000..8ef325b67c --- /dev/null +++ b/test/files/run/ctries/DumbHash.scala @@ -0,0 +1,14 @@ + + + + + + +class DumbHash(val i: Int) { + override def equals(other: Any) = other match { + case that: DumbHash => that.i == this.i + case _ => false + } + override def hashCode = i % 5 + override def toString = "DH(%s)".format(i) +} diff --git a/test/files/run/ctries/Wrap.scala b/test/files/run/ctries/Wrap.scala new file mode 100644 index 0000000000..7b645c1612 --- /dev/null +++ b/test/files/run/ctries/Wrap.scala @@ -0,0 +1,9 @@ + + + + + + +case class Wrap(i: Int) { + override def hashCode = i * 0x9e3775cd +} diff --git a/test/files/run/ctries/concmap.scala b/test/files/run/ctries/concmap.scala new file mode 100644 index 0000000000..85a305ce5b --- /dev/null +++ b/test/files/run/ctries/concmap.scala @@ -0,0 +1,169 @@ + + + +import collection.mutable.Ctrie + + +object ConcurrentMapSpec extends Spec { + + val initsz = 500 + val secondsz = 750 + + def test() { + "support put" in { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until initsz) assert(ct.put(new Wrap(i), i) == None) + for (i <- 0 until initsz) assert(ct.put(new Wrap(i), -i) == Some(i)) + } + + "support put if absent" in { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until initsz) ct.update(new Wrap(i), i) + for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) + for (i <- 0 until initsz) assert(ct.putIfAbsent(new Wrap(i), -i) == Some(i)) + for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), -i) == None) + for (i <- initsz until secondsz) assert(ct.putIfAbsent(new Wrap(i), i) == Some(-i)) + } + + "support remove if mapped to a specific value" in { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until initsz) ct.update(new Wrap(i), i) + for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), -i - 1) == false) + for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == true) + for (i <- 0 until initsz) assert(ct.remove(new Wrap(i), i) == false) + } + + "support replace if mapped to a specific value" in { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until initsz) ct.update(new Wrap(i), i) + for (i <- 
0 until initsz) assert(ct.replace(new Wrap(i), -i - 1, -i - 2) == false) + for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == true) + for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i, -i - 2) == false) + for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i, 0) == false) + } + + "support replace if present" in { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until initsz) ct.update(new Wrap(i), i) + for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), -i) == Some(i)) + for (i <- 0 until initsz) assert(ct.replace(new Wrap(i), i) == Some(-i)) + for (i <- initsz until secondsz) assert(ct.replace(new Wrap(i), i) == None) + } + + def assertEqual(a: Any, b: Any) = { + if (a != b) println(a, b) + assert(a == b) + } + + "support replace if mapped to a specific value, using several threads" in { + val ct = new Ctrie[Wrap, Int] + val sz = 55000 + for (i <- 0 until sz) ct.update(new Wrap(i), i) + + class Updater(index: Int, offs: Int) extends Thread { + override def run() { + var repeats = 0 + for (i <- 0 until sz) { + val j = (offs + i) % sz + var k = Int.MaxValue + do { + if (k != Int.MaxValue) repeats += 1 + k = ct.lookup(new Wrap(j)) + } while (!ct.replace(new Wrap(j), k, -k)) + } + //println("Thread %d repeats: %d".format(index, repeats)) + } + } + + val threads = for (i <- 0 until 16) yield new Updater(i, sz / 32 * i) + threads.foreach(_.start()) + threads.foreach(_.join()) + + for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), i) + + val threads2 = for (i <- 0 until 15) yield new Updater(i, sz / 32 * i) + threads2.foreach(_.start()) + threads2.foreach(_.join()) + + for (i <- 0 until sz) assertEqual(ct(new Wrap(i)), -i) + } + + "support put if absent, several threads" in { + val ct = new Ctrie[Wrap, Int] + val sz = 110000 + + class Updater(offs: Int) extends Thread { + override def run() { + for (i <- 0 until sz) { + val j = (offs + i) % sz + ct.putIfAbsent(new Wrap(j), j) + assert(ct.lookup(new Wrap(j)) == j) + } + } + } + + val threads = for (i <- 0 until 16) yield new Updater(sz / 32 * i) + threads.foreach(_.start()) + threads.foreach(_.join()) + + for (i <- 0 until sz) assert(ct(new Wrap(i)) == i) + } + + "support remove if mapped to a specific value, several threads" in { + val ct = new Ctrie[Wrap, Int] + val sz = 55000 + for (i <- 0 until sz) ct.update(new Wrap(i), i) + + class Remover(offs: Int) extends Thread { + override def run() { + for (i <- 0 until sz) { + val j = (offs + i) % sz + ct.remove(new Wrap(j), j) + assert(ct.get(new Wrap(j)) == None) + } + } + } + + val threads = for (i <- 0 until 16) yield new Remover(sz / 32 * i) + threads.foreach(_.start()) + threads.foreach(_.join()) + + for (i <- 0 until sz) assert(ct.get(new Wrap(i)) == None) + } + + "have all or none of the elements depending on the oddity" in { + val ct = new Ctrie[Wrap, Int] + val sz = 65000 + for (i <- 0 until sz) ct(new Wrap(i)) = i + + class Modifier(index: Int, offs: Int) extends Thread { + override def run() { + for (j <- 0 until sz) { + val i = (offs + j) % sz + var success = false + do { + if (ct.contains(new Wrap(i))) { + success = ct.remove(new Wrap(i)) != None + } else { + success = ct.putIfAbsent(new Wrap(i), i) == None + } + } while (!success) + } + } + } + + def modify(n: Int) = { + val threads = for (i <- 0 until n) yield new Modifier(i, sz / n * i) + threads.foreach(_.start()) + threads.foreach(_.join()) + } + + modify(16) + for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), Some(i)) + modify(15) + for (i <- 0 until sz) assertEqual(ct.get(new 
Wrap(i)), None) + } + + } + +} diff --git a/test/files/run/ctries/iterator.scala b/test/files/run/ctries/iterator.scala new file mode 100644 index 0000000000..1cef4f66ea --- /dev/null +++ b/test/files/run/ctries/iterator.scala @@ -0,0 +1,279 @@ + + + + +import collection._ +import collection.mutable.Ctrie + + + +object IteratorSpec extends Spec { + + def test() { + "work for an empty trie" in { + val ct = new Ctrie + val it = ct.iterator + + it.hasNext shouldEqual (false) + evaluating { it.next() }.shouldProduce [NoSuchElementException] + } + + def nonEmptyIteratorCheck(sz: Int) { + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct.put(new Wrap(i), i) + + val it = ct.iterator + val tracker = mutable.Map[Wrap, Int]() + for (i <- 0 until sz) { + assert(it.hasNext == true) + tracker += it.next + } + + it.hasNext shouldEqual (false) + evaluating { it.next() }.shouldProduce [NoSuchElementException] + tracker.size shouldEqual (sz) + tracker shouldEqual (ct) + } + + "work for a 1 element trie" in { + nonEmptyIteratorCheck(1) + } + + "work for a 2 element trie" in { + nonEmptyIteratorCheck(2) + } + + "work for a 3 element trie" in { + nonEmptyIteratorCheck(3) + } + + "work for a 5 element trie" in { + nonEmptyIteratorCheck(5) + } + + "work for a 10 element trie" in { + nonEmptyIteratorCheck(10) + } + + "work for a 20 element trie" in { + nonEmptyIteratorCheck(20) + } + + "work for a 50 element trie" in { + nonEmptyIteratorCheck(50) + } + + "work for a 100 element trie" in { + nonEmptyIteratorCheck(100) + } + + "work for a 1k element trie" in { + nonEmptyIteratorCheck(1000) + } + + "work for a 5k element trie" in { + nonEmptyIteratorCheck(5000) + } + + "work for a 75k element trie" in { + nonEmptyIteratorCheck(75000) + } + + "work for a 250k element trie" in { + nonEmptyIteratorCheck(500000) + } + + def nonEmptyCollideCheck(sz: Int) { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until sz) ct.put(new DumbHash(i), i) + + val it = ct.iterator + val tracker = mutable.Map[DumbHash, Int]() + for (i <- 0 until sz) { + assert(it.hasNext == true) + tracker += it.next + } + + it.hasNext shouldEqual (false) + evaluating { it.next() }.shouldProduce [NoSuchElementException] + tracker.size shouldEqual (sz) + tracker shouldEqual (ct) + } + + "work for colliding hashcodes, 2 element trie" in { + nonEmptyCollideCheck(2) + } + + "work for colliding hashcodes, 3 element trie" in { + nonEmptyCollideCheck(3) + } + + "work for colliding hashcodes, 5 element trie" in { + nonEmptyCollideCheck(5) + } + + "work for colliding hashcodes, 10 element trie" in { + nonEmptyCollideCheck(10) + } + + "work for colliding hashcodes, 100 element trie" in { + nonEmptyCollideCheck(100) + } + + "work for colliding hashcodes, 500 element trie" in { + nonEmptyCollideCheck(500) + } + + "work for colliding hashcodes, 5k element trie" in { + nonEmptyCollideCheck(5000) + } + + def assertEqual(a: Map[Wrap, Int], b: Map[Wrap, Int]) { + if (a != b) { + println(a.size + " vs " + b.size) + // println(a) + // println(b) + // println(a.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) + // println(b.toSeq.sortBy((x: (Wrap, Int)) => x._1.i)) + } + assert(a == b) + } + + "be consistent when taken with concurrent modifications" in { + val sz = 25000 + val W = 25 + val S = 10 + val checks = 5 + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct.put(new Wrap(i), i) + + class Modifier extends Thread { + override def run() { + for (i <- 0 until sz) ct.putIfAbsent(new Wrap(i), i) match { + case Some(_) => ct.remove(new Wrap(i)) + case None => + } + 
} + } + + def consistentIteration(ct: Ctrie[Wrap, Int], checks: Int) { + class Iter extends Thread { + override def run() { + val snap = ct.readOnlySnapshot() + val initial = mutable.Map[Wrap, Int]() + for (kv <- snap) initial += kv + + for (i <- 0 until checks) { + assertEqual(snap.iterator.toMap, initial) + } + } + } + + val iter = new Iter + iter.start() + iter.join() + } + + val threads = for (_ <- 0 until W) yield new Modifier + threads.foreach(_.start()) + for (_ <- 0 until S) consistentIteration(ct, checks) + threads.foreach(_.join()) + } + + "be consistent with a concurrent removal with a well defined order" in { + val sz = 150000 + val sgroupsize = 40 + val sgroupnum = 20 + val removerslowdown = 50 + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct.put(new Wrap(i), i) + + class Remover extends Thread { + override def run() { + for (i <- 0 until sz) { + assert(ct.remove(new Wrap(i)) == Some(i)) + for (i <- 0 until removerslowdown) ct.get(new Wrap(i)) // slow down, mate + } + //println("done removing") + } + } + + def consistentIteration(it: Iterator[(Wrap, Int)]) = { + class Iter extends Thread { + override def run() { + val elems = it.toSeq + if (elems.nonEmpty) { + val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i + assert(elems.forall(_._1.i >= minelem)) + } + } + } + new Iter + } + + val remover = new Remover + remover.start() + for (_ <- 0 until sgroupnum) { + val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) + iters.foreach(_.start()) + iters.foreach(_.join()) + } + //println("done with iterators") + remover.join() + } + + "be consistent with a concurrent insertion with a well defined order" in { + val sz = 150000 + val sgroupsize = 30 + val sgroupnum = 30 + val inserterslowdown = 50 + val ct = new Ctrie[Wrap, Int] + + class Inserter extends Thread { + override def run() { + for (i <- 0 until sz) { + assert(ct.put(new Wrap(i), i) == None) + for (i <- 0 until inserterslowdown) ct.get(new Wrap(i)) // slow down, mate + } + //println("done inserting") + } + } + + def consistentIteration(it: Iterator[(Wrap, Int)]) = { + class Iter extends Thread { + override def run() { + val elems = it.toSeq + if (elems.nonEmpty) { + val maxelem = elems.maxBy((x: (Wrap, Int)) => x._1.i)._1.i + assert(elems.forall(_._1.i <= maxelem)) + } + } + } + new Iter + } + + val inserter = new Inserter + inserter.start() + for (_ <- 0 until sgroupnum) { + val iters = for (_ <- 0 until sgroupsize) yield consistentIteration(ct.iterator) + iters.foreach(_.start()) + iters.foreach(_.join()) + } + //println("done with iterators") + inserter.join() + } + + "work on a yet unevaluated snapshot" in { + val sz = 50000 + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct.update(new Wrap(i), i) + + val snap = ct.snapshot() + val it = snap.iterator + + while (it.hasNext) it.next() + } + + } + +} diff --git a/test/files/run/ctries/lnode.scala b/test/files/run/ctries/lnode.scala new file mode 100644 index 0000000000..28da4cc62f --- /dev/null +++ b/test/files/run/ctries/lnode.scala @@ -0,0 +1,58 @@ + + + +import collection.mutable.Ctrie + + +object LNodeSpec extends Spec { + + val initsz = 1500 + val secondsz = 1750 + + def test() { + "accept elements with the same hash codes" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) ct.update(new DumbHash(i), i) + } + + "lookup elements with the same hash codes" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) ct.update(new DumbHash(i), i) + for (i <- 0 until initsz) assert(ct.get(new 
DumbHash(i)) == Some(i)) + for (i <- initsz until secondsz) assert(ct.get(new DumbHash(i)) == None) + } + + "remove elements with the same hash codes" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) ct.update(new DumbHash(i), i) + for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i)) == Some(i)) + for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) + } + + "put elements with the same hash codes if absent" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) ct.put(new DumbHash(i), i) + for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) + for (i <- 0 until initsz) assert(ct.putIfAbsent(new DumbHash(i), i) == Some(i)) + for (i <- initsz until secondsz) assert(ct.putIfAbsent(new DumbHash(i), i) == None) + for (i <- initsz until secondsz) assert(ct.lookup(new DumbHash(i)) == i) + } + + "replace elements with the same hash codes" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) + for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == i) + for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i) == Some(i)) + for (i <- 0 until initsz) assert(ct.lookup(new DumbHash(i)) == -i) + for (i <- 0 until initsz) assert(ct.replace(new DumbHash(i), -i, i) == true) + } + + "remove elements with the same hash codes if mapped to a specific value" in { + val ct = new Ctrie[DumbHash, Int] + for (i <- 0 until initsz) assert(ct.put(new DumbHash(i), i) == None) + for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i), i) == true) + } + + } + +} diff --git a/test/files/run/ctries/main.scala b/test/files/run/ctries/main.scala new file mode 100644 index 0000000000..8db7fcef54 --- /dev/null +++ b/test/files/run/ctries/main.scala @@ -0,0 +1,45 @@ + + + + + + + +object Test { + + def main(args: Array[String]) { + ConcurrentMapSpec.test() + IteratorSpec.test() + LNodeSpec.test() + SnapshotSpec.test() + } + +} + + +trait Spec { + + implicit def str2ops(s: String) = new { + def in[U](body: =>U) { + // just execute body + body + } + } + + implicit def any2ops(a: Any) = new { + def shouldEqual(other: Any) = assert(a == other) + } + + def evaluating[U](body: =>U) = new { + def shouldProduce[T <: Throwable: ClassManifest]() = { + var produced = false + try body + catch { + case e => if (e.getClass == implicitly[ClassManifest[T]].erasure) produced = true + } finally { + assert(produced, "Did not produce exception of type: " + implicitly[ClassManifest[T]]) + } + } + } + +} diff --git a/test/files/run/ctries/snapshot.scala b/test/files/run/ctries/snapshot.scala new file mode 100644 index 0000000000..69073d3f06 --- /dev/null +++ b/test/files/run/ctries/snapshot.scala @@ -0,0 +1,267 @@ + + + + +import collection._ +import collection.mutable.Ctrie + + + +object SnapshotSpec extends Spec { + + def test() { + "support snapshots" in { + val ctn = new Ctrie + ctn.snapshot() + ctn.readOnlySnapshot() + + val ct = new Ctrie[Int, Int] + for (i <- 0 until 100) ct.put(i, i) + ct.snapshot() + ct.readOnlySnapshot() + } + + "empty 2 quiescent snapshots in isolation" in { + val sz = 4000 + + class Worker(trie: Ctrie[Wrap, Int]) extends Thread { + override def run() { + for (i <- 0 until sz) { + assert(trie.remove(new Wrap(i)) == Some(i)) + for (j <- 0 until sz) + if (j <= i) assert(trie.get(new Wrap(j)) == None) + else assert(trie.get(new Wrap(j)) == Some(j)) + } + } + } + + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct.put(new Wrap(i), i) + val snapt = ct.snapshot() + + val original 
= new Worker(ct) + val snapshot = new Worker(snapt) + original.start() + snapshot.start() + original.join() + snapshot.join() + + for (i <- 0 until sz) { + assert(ct.get(new Wrap(i)) == None) + assert(snapt.get(new Wrap(i)) == None) + } + } + + def consistentReadOnly(name: String, readonly: Map[Wrap, Int], sz: Int, N: Int) { + @volatile var e: Exception = null + + // reads possible entries once and stores them + // then reads all these N more times to check if the + // state stayed the same + class Reader(trie: Map[Wrap, Int]) extends Thread { + setName("Reader " + name) + + override def run() = + try check() + catch { + case ex: Exception => e = ex + } + + def check() { + val initial = mutable.Map[Wrap, Int]() + for (i <- 0 until sz) trie.get(new Wrap(i)) match { + case Some(i) => initial.put(new Wrap(i), i) + case None => // do nothing + } + + for (k <- 0 until N) { + for (i <- 0 until sz) { + val tres = trie.get(new Wrap(i)) + val ires = initial.get(new Wrap(i)) + if (tres != ires) println(i, "initially: " + ires, "traversal %d: %s".format(k, tres)) + assert(tres == ires) + } + } + } + } + + val reader = new Reader(readonly) + reader.start() + reader.join() + + if (e ne null) { + e.printStackTrace() + throw e + } + } + + // traverses the trie `rep` times and modifies each entry + class Modifier(trie: Ctrie[Wrap, Int], index: Int, rep: Int, sz: Int) extends Thread { + setName("Modifier %d".format(index)) + + override def run() { + for (k <- 0 until rep) { + for (i <- 0 until sz) trie.putIfAbsent(new Wrap(i), i) match { + case Some(_) => trie.remove(new Wrap(i)) + case None => // do nothing + } + } + } + } + + // removes all the elements from the trie + class Remover(trie: Ctrie[Wrap, Int], index: Int, totremovers: Int, sz: Int) extends Thread { + setName("Remover %d".format(index)) + + override def run() { + for (i <- 0 until sz) trie.remove(new Wrap((i + sz / totremovers * index) % sz)) + } + } + + "have a consistent quiescent read-only snapshot" in { + val sz = 10000 + val N = 100 + val W = 10 + + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(new Wrap(i)) = i + val readonly = ct.readOnlySnapshot() + val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) + + threads.foreach(_.start()) + consistentReadOnly("qm", readonly, sz, N) + threads.foreach(_.join()) + } + + // now, we check non-quiescent snapshots, as these permit situations + // where a thread is caught in the middle of the update when a snapshot is taken + + "have a consistent non-quiescent read-only snapshot, concurrent with removes only" in { + val sz = 1250 + val W = 100 + val S = 5000 + + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(new Wrap(i)) = i + val threads = for (i <- 0 until W) yield new Remover(ct, i, W, sz) + + threads.foreach(_.start()) + for (i <- 0 until S) consistentReadOnly("non-qr", ct.readOnlySnapshot(), sz, 5) + threads.foreach(_.join()) + } + + "have a consistent non-quiescent read-only snapshot, concurrent with modifications" in { + val sz = 1000 + val N = 7000 + val W = 10 + val S = 7000 + + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(new Wrap(i)) = i + val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) + + threads.foreach(_.start()) + for (i <- 0 until S) consistentReadOnly("non-qm", ct.readOnlySnapshot(), sz, 5) + threads.foreach(_.join()) + } + + def consistentNonReadOnly(name: String, trie: Ctrie[Wrap, Int], sz: Int, N: Int) { + @volatile var e: Exception = null + + // reads possible entries once and stores them + // then reads all 
these N more times to check if the + // state stayed the same + class Worker extends Thread { + setName("Worker " + name) + + override def run() = + try check() + catch { + case ex: Exception => e = ex + } + + def check() { + val initial = mutable.Map[Wrap, Int]() + for (i <- 0 until sz) trie.get(new Wrap(i)) match { + case Some(i) => initial.put(new Wrap(i), i) + case None => // do nothing + } + + for (k <- 0 until N) { + // modify + for ((key, value) <- initial) { + val oldv = if (k % 2 == 0) value else -value + val newv = -oldv + trie.replace(key, oldv, newv) + } + + // check + for (i <- 0 until sz) if (initial.contains(new Wrap(i))) { + val expected = if (k % 2 == 0) -i else i + //println(trie.get(new Wrap(i))) + assert(trie.get(new Wrap(i)) == Some(expected)) + } else { + assert(trie.get(new Wrap(i)) == None) + } + } + } + } + + val worker = new Worker + worker.start() + worker.join() + + if (e ne null) { + e.printStackTrace() + throw e + } + } + + "have a consistent non-quiescent snapshot, concurrent with modifications" in { + val sz = 9000 + val N = 1000 + val W = 10 + val S = 400 + + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(new Wrap(i)) = i + val threads = for (i <- 0 until W) yield new Modifier(ct, i, N, sz) + + threads.foreach(_.start()) + for (i <- 0 until S) { + consistentReadOnly("non-qm", ct.snapshot(), sz, 5) + consistentNonReadOnly("non-qsnap", ct.snapshot(), sz, 5) + } + threads.foreach(_.join()) + } + + "work when many concurrent snapshots are taken, concurrent with modifications" in { + val sz = 12000 + val W = 10 + val S = 10 + val modifytimes = 1200 + val snaptimes = 600 + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(new Wrap(i)) = i + + class Snapshooter extends Thread { + setName("Snapshooter") + override def run() { + for (k <- 0 until snaptimes) { + val snap = ct.snapshot() + for (i <- 0 until sz) snap.remove(new Wrap(i)) + for (i <- 0 until sz) assert(!snap.contains(new Wrap(i))) + } + } + } + + val mods = for (i <- 0 until W) yield new Modifier(ct, i, modifytimes, sz) + val shooters = for (i <- 0 until S) yield new Snapshooter + val threads = mods ++ shooters + threads.foreach(_.start()) + threads.foreach(_.join()) + } + + } + +} diff --git a/test/files/scalacheck/Ctrie.scala b/test/files/scalacheck/Ctrie.scala new file mode 100644 index 0000000000..2950937278 --- /dev/null +++ b/test/files/scalacheck/Ctrie.scala @@ -0,0 +1,199 @@ + + + +import org.scalacheck._ +import Prop._ +import org.scalacheck.Gen._ +import collection._ +import collection.mutable.Ctrie + + + +case class Wrap(i: Int) { + override def hashCode = i // * 0x9e3775cd +} + + +/** A check mainly oriented towards checking snapshot correctness. 
+ */ +object Test extends Properties("Ctrie") { + + /* generators */ + + val sizes = choose(0, 200000) + + val threadCounts = choose(2, 16) + + val threadCountsAndSizes = for { + p <- threadCounts + sz <- sizes + } yield (p, sz); + + + /* helpers */ + + def inParallel[T](totalThreads: Int)(body: Int => T): Seq[T] = { + val threads = for (idx <- 0 until totalThreads) yield new Thread { + setName("ParThread-" + idx) + private var res: T = _ + override def run() { + res = body(idx) + } + def result = { + this.join() + res + } + } + + threads foreach (_.start()) + threads map (_.result) + } + + def spawn[T](body: =>T): { def get: T } = { + val t = new Thread { + setName("SpawnThread") + private var res: T = _ + override def run() { + res = body + } + def result = res + } + t.start() + new { + def get: T = { + t.join() + t.result + } + } + } + + def elementRange(threadIdx: Int, totalThreads: Int, totalElems: Int): Range = { + val sz = totalElems + val idx = threadIdx + val p = totalThreads + val start = (sz / p) * idx + math.min(idx, sz % p) + val elems = (sz / p) + (if (idx < sz % p) 1 else 0) + val end = start + elems + (start until end) + } + + def hasGrown[K, V](last: Map[K, V], current: Map[K, V]) = { + (last.size <= current.size) && { + last forall { + case (k, v) => current.get(k) == Some(v) + } + } + } + + object err { + var buffer = new StringBuilder + def println(a: AnyRef) = buffer.append(a.toString).append("\n") + def clear() = buffer.clear() + def flush() = { + Console.out.println(buffer) + clear() + } + } + + + /* properties */ + + property("concurrent growing snapshots") = forAll(threadCounts, sizes) { + (numThreads, numElems) => + val p = 3 //numThreads + val sz = 102 //numElems + val ct = new Ctrie[Wrap, Int] + + // checker + val checker = spawn { + def check(last: Map[Wrap, Int], iterationsLeft: Int): Boolean = { + val current = ct.readOnlySnapshot() + if (!hasGrown(last, current)) false + else if (current.size >= sz) true + else if (iterationsLeft < 0) false + else check(current, iterationsLeft - 1) + } + check(ct.readOnlySnapshot(), 500) + } + + // fillers + inParallel(p) { + idx => + elementRange(idx, p, sz) foreach (i => ct.update(Wrap(i), i)) + } + + // wait for checker to finish + val growing = true//checker.get + + val ok = growing && ((0 until sz) forall { + case i => ct.get(Wrap(i)) == Some(i) + }) + + ok + } + + property("update") = forAll(sizes) { + (n: Int) => + val ct = new Ctrie[Int, Int] + for (i <- 0 until n) ct(i) = i + (0 until n) forall { + case i => ct(i) == i + } + } + + property("concurrent update") = forAll(threadCountsAndSizes) { + case (p, sz) => + val ct = new Ctrie[Wrap, Int] + + inParallel(p) { + idx => + for (i <- elementRange(idx, p, sz)) ct(Wrap(i)) = i + } + + (0 until sz) forall { + case i => ct(Wrap(i)) == i + } + } + + + property("concurrent remove") = forAll(threadCounts, sizes) { + (p, sz) => + val ct = new Ctrie[Wrap, Int] + for (i <- 0 until sz) ct(Wrap(i)) = i + + inParallel(p) { + idx => + for (i <- elementRange(idx, p, sz)) ct.remove(Wrap(i)) + } + + (0 until sz) forall { + case i => ct.get(Wrap(i)) == None + } + } + + + property("concurrent putIfAbsent") = forAll(threadCounts, sizes) { + (p, sz) => + val ct = new Ctrie[Wrap, Int] + + val results = inParallel(p) { + idx => + elementRange(idx, p, sz) find (i => ct.putIfAbsent(Wrap(i), i) != None) + } + + (results forall (_ == None)) && ((0 until sz) forall { + case i => ct.get(Wrap(i)) == Some(i) + }) + } + +} + + + + + + + + + + -- cgit v1.2.3 From 
d01da30e4c2197ec24e29fb77ce9a62435bff9a4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 11:21:06 -0800 Subject: Updated get-scala-revision to use git describe. Hopefully this will satisfy all version-interesting parties. Version string now looks like this: v2.10.0-M1-0098-gbda61bb7e5-2012-02-01 Review by @dragos and anyone who uses windows (where it definitely won't produce that string, but hopefully it produces some usable string.) --- tools/get-scala-revision | 32 ++++++++++++++++---------------- tools/get-scala-revision.bat | 2 +- 2 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tools/get-scala-revision b/tools/get-scala-revision index b27b6ddc82..8d48c8cb78 100755 --- a/tools/get-scala-revision +++ b/tools/get-scala-revision @@ -2,23 +2,23 @@ # # Usage: get-scala-revision [dir] # Figures out current scala revision of a git clone. -# # If no dir is given, current working dir is used. +# +# Example build version string: +# v2.10.0-M1-0098-g6f1c486d0b-2012-02-01 +# + +[[ $# -eq 0 ]] || cd "$1" -# not like releases come out so often that we are duty-bound -# to recalculate this every time. -# git merge-base v2.8.2 v2.9.1 master -devbase="df13e31bbb" +# the closest tag, obtained separately because we have to +# reconstruct the string around the padded distance. +tag=$(git describe --abbrev=0) -# reimplementing git describe hopefully in a way which works -# without any particular tags, branches, or recent versions of git. -# this is supposed to generate -# dev-NNNN-g -# where NNNN is the number of commits since devbase, which -# is the merge-base of the most recent release and master. -# Presently hardcoded to reduce uncertainty, v2.8.2/v2.9.1/master. -commits=$(git --no-pager log --pretty=oneline $devbase..HEAD | wc -l) -sha=$(git rev-list -n 1 HEAD) -datestr=$(date "+%Y-%m-%d") +# the full string - padding correctness depends on abbrev=10. +described=$(git describe --abbrev=10 --always --tags) -printf "rdev-%s-%s-g%s\n" $commits $datestr ${sha:0:7} +# 016 is rocket-surgically-calibrated to pad the distance from the +# tag to the current commit into a 4-digit number - since maven +# will be treating this as a string, the ide depends on +# 10 being greater than 9 (thus 0010 and 00009.) +printf "%s-%016s-%s\n" "$tag" "${described##${tag}-}" $(date "+%Y-%m-%d") diff --git a/tools/get-scala-revision.bat b/tools/get-scala-revision.bat index f4dc24b71f..48c7cbd94f 100644 --- a/tools/get-scala-revision.bat +++ b/tools/get-scala-revision.bat @@ -15,7 +15,7 @@ if "%*"=="" ( cd %_DIR% if exist .git\NUL ( - git describe HEAD --abbrev=7 --match dev + git describe --abbrev=10 --always --tags ) :end -- cgit v1.2.3 From fbd5efe49cf23b446762dfa5026e8bac82ab04fc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 13:19:46 -0800 Subject: Fixing build string. Version number turning up more than once. Looking at the situation I could only see the former mechanism as trouble. I deleted build.number and in ant am generating the build string only from tools/get-scala-revision. We may need to revisit when the time comes to build a release. --- README.rst | 1 - build.number | 5 ----- build.xml | 13 ++++--------- 3 files changed, 4 insertions(+), 15 deletions(-) delete mode 100644 build.number diff --git a/README.rst b/README.rst index 940d948dd5..383db1c175 100644 --- a/README.rst +++ b/README.rst @@ -14,7 +14,6 @@ part of the repository but are either automatically generated by the build script or user-created if needed. This is not a complete listing. 
:: scala/ +--build/ Build products output directory for ant. - +--build.number The version number of the current distribution. +--build.xml The main Ant build script. +--dist/ The destination folder for Scala distributions. +--docs/ Documentation and sample code. diff --git a/build.number b/build.number deleted file mode 100644 index 91c7e72c85..0000000000 --- a/build.number +++ /dev/null @@ -1,5 +0,0 @@ -#Tue Sep 11 19:21:09 CEST 2007 -version.minor=10 -version.patch=0 -version.suffix=alpha -version.major=2 diff --git a/build.xml b/build.xml index 0cb728c35f..57d2eed1c0 100644 --- a/build.xml +++ b/build.xml @@ -251,18 +251,13 @@ INITIALISATION - - + + + - + - - - - -- cgit v1.2.3 From 264ff5d5e8dbec4ae2e13bf52e66a965d884b25c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 31 Jan 2012 00:14:47 -0800 Subject: Fix for parser OOM. The scanner performs some sketchy heuristics when it sees an ascii 1A since it may be EOF or it may be part of a literal. Due to this, it failed to detect an unterminated string literal if the opening quote was unicode-escaped, leading to memory exhaustion as it read SUs until the universe ended. We're parsing a fixed input with known length! There's no reason to be guessing about whether a char is EOF. If we're at the end of the file, it's the end of file. Otherwise, it is not the end of the file. --- src/compiler/scala/tools/nsc/ast/parser/Scanners.scala | 10 ++++++---- test/files/neg/unicode-unterminated-quote.check | 4 ++++ test/files/neg/unicode-unterminated-quote.scala | 2 ++ 3 files changed, 12 insertions(+), 4 deletions(-) create mode 100644 test/files/neg/unicode-unterminated-quote.check create mode 100644 test/files/neg/unicode-unterminated-quote.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index 4478fb6128..dae264fffe 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -84,6 +84,8 @@ trait Scanners extends ScannersCommon { abstract class Scanner extends CharArrayReader with TokenData with ScannerCommon { private def isDigit(c: Char) = java.lang.Character isDigit c + + def isAtEnd = charOffset >= buf.length def flush = { charOffset = offset; nextChar(); this } @@ -449,7 +451,7 @@ trait Scanners extends ScannersCommon { case ']' => nextChar(); token = RBRACKET case SU => - if (charOffset >= buf.length) token = EOF + if (isAtEnd) token = EOF else { syntaxError("illegal character") nextChar() @@ -771,10 +773,10 @@ trait Scanners extends ScannersCommon { putChar(ch) } - private def getLitChars(delimiter: Char) = - while (ch != delimiter && (ch != CR && ch != LF && ch != SU || isUnicodeEscape)) { + private def getLitChars(delimiter: Char) = { + while (ch != delimiter && !isAtEnd && (ch != SU && ch != CR && ch != LF || isUnicodeEscape)) getLitChar() - } + } /** read fractional part and exponent of floating point number * if one is present. 
diff --git a/test/files/neg/unicode-unterminated-quote.check b/test/files/neg/unicode-unterminated-quote.check new file mode 100644 index 0000000000..fc5caa6d7e --- /dev/null +++ b/test/files/neg/unicode-unterminated-quote.check @@ -0,0 +1,4 @@ +unicode-unterminated-quote.scala:2: error: unclosed string literal + val x = /u0022 + ^ +one error found diff --git a/test/files/neg/unicode-unterminated-quote.scala b/test/files/neg/unicode-unterminated-quote.scala new file mode 100644 index 0000000000..bb6eab667f --- /dev/null +++ b/test/files/neg/unicode-unterminated-quote.scala @@ -0,0 +1,2 @@ +class A { + val x = \u0022 \ No newline at end of file -- cgit v1.2.3 From 68218fa678abfce9cce9f764e3cb9463ce6e0b85 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 1 Feb 2012 14:05:25 -0800 Subject: Update RoundingMode. Not to use the deprecated Enumeration constructor. --- src/compiler/scala/tools/nsc/io/Path.scala | 2 +- src/library/scala/math/BigDecimal.scala | 4 +++- 2 files changed, 4 insertions(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/io/Path.scala b/src/compiler/scala/tools/nsc/io/Path.scala index 9efff089ba..a1b8e5e4d5 100644 --- a/src/compiler/scala/tools/nsc/io/Path.scala +++ b/src/compiler/scala/tools/nsc/io/Path.scala @@ -48,7 +48,7 @@ object Path { implicit def jfile2path(jfile: JFile): Path = apply(jfile) // java 7 style, we don't use it yet - // object AccessMode extends Enumeration("AccessMode") { + // object AccessMode extends Enumeration { // val EXECUTE, READ, WRITE = Value // } // def checkAccess(modes: AccessMode*): Boolean = { diff --git a/src/library/scala/math/BigDecimal.scala b/src/library/scala/math/BigDecimal.scala index 497de92c80..c1f45eccfb 100644 --- a/src/library/scala/math/BigDecimal.scala +++ b/src/library/scala/math/BigDecimal.scala @@ -33,8 +33,10 @@ object BigDecimal { /** Cache ony for defaultMathContext using BigDecimals in a small range. */ private lazy val cache = new Array[BigDecimal](maxCached - minCached + 1) - object RoundingMode extends Enumeration(java.math.RoundingMode.values map (_.toString) : _*) with Serializable { + object RoundingMode extends Enumeration { type RoundingMode = Value + // These are supposed to be the same as java.math.RoundingMode.values, + // though it seems unwise to rely on the correspondence. val UP, DOWN, CEILING, FLOOR, HALF_UP, HALF_DOWN, HALF_EVEN, UNNECESSARY = Value } -- cgit v1.2.3 From 1e0707786b118e3e33379e7acdc75306b45e6547 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 31 Jan 2012 12:12:23 +0100 Subject: Hardens classToType logic Reflection now correctly processes classes, objects and inner classes that are declared in classes and objects. However classToType still crashes on compound types and local classes. For more information on those, follow the links: * Compound types: https://issues.scala-lang.org/browse/SI-5430 * Local classes: https://issues.scala-lang.org/browse/SI-5431 Fixes https://issues.scala-lang.org/browse/SI-5256. Review by @paulp, @odersky. 
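As a quick illustration (a sketch for this note, not part of the patch itself), the shape of lookup this commit repairs can be seen with a class declared inside an object, written in the style of the t5256 tests added below and using the same experimental scala.reflect.mirror API. Such a class reaches the JVM as Outer$Inner, so its Scala symbol has to be found by following the module-class owner and stripping the $ suffix (see scalaSimpleName and followStatic in the hunks that follow) rather than by a naive lookup of the Java simple name:

  import scala.reflect.mirror._

  object Outer {
    class Inner // member class of an object: JVM-level name is Outer$Inner
  }

  object Demo extends App {
    // classToType must resolve Inner as a member of Outer's module class
    val tpe = classToType(classOf[Outer.Inner])
    println(tpe)                                                   // expected: Outer.Inner
    println(tpe.typeSymbol == classToSymbol(classOf[Outer.Inner])) // expected: true
  }
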
--- .../scala/reflect/runtime/JavaToScala.scala | 118 +++++++++++++++------ .../nsc/interpreter/AbstractFileClassLoader.scala | 39 ++++++- .../scala/tools/nsc/interpreter/IMain.scala | 30 +----- test/files/run/t5256a.check | 2 + test/files/run/t5256a.scala | 9 ++ test/files/run/t5256b.check | 2 + test/files/run/t5256b.scala | 8 ++ test/files/run/t5256d.check | 20 ++++ test/files/run/t5256d.scala | 10 ++ test/files/run/t5256e.check | 2 + test/files/run/t5256e.scala | 9 ++ test/files/run/t5256f.check | 4 + test/files/run/t5256f.scala | 19 ++++ test/pending/run/t5256c.check | 0 test/pending/run/t5256c.scala | 10 ++ test/pending/run/t5256g.check | 0 test/pending/run/t5256g.scala | 11 ++ test/pending/run/t5256h.check | 8 ++ test/pending/run/t5256h.scala | 8 ++ 19 files changed, 247 insertions(+), 62 deletions(-) create mode 100644 test/files/run/t5256a.check create mode 100644 test/files/run/t5256a.scala create mode 100644 test/files/run/t5256b.check create mode 100644 test/files/run/t5256b.scala create mode 100644 test/files/run/t5256d.check create mode 100644 test/files/run/t5256d.scala create mode 100644 test/files/run/t5256e.check create mode 100644 test/files/run/t5256e.scala create mode 100644 test/files/run/t5256f.check create mode 100644 test/files/run/t5256f.scala create mode 100644 test/pending/run/t5256c.check create mode 100644 test/pending/run/t5256c.scala create mode 100644 test/pending/run/t5256g.check create mode 100644 test/pending/run/t5256g.scala create mode 100644 test/pending/run/t5256h.check create mode 100644 test/pending/run/t5256h.scala diff --git a/src/compiler/scala/reflect/runtime/JavaToScala.scala b/src/compiler/scala/reflect/runtime/JavaToScala.scala index b4bcc52a23..4c49c0221f 100644 --- a/src/compiler/scala/reflect/runtime/JavaToScala.scala +++ b/src/compiler/scala/reflect/runtime/JavaToScala.scala @@ -241,16 +241,32 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => * The Scala owner of the Scala class corresponding to the Java class `jclazz` */ private def sOwner(jclazz: jClass[_]): Symbol = { - if (jclazz.isMemberClass) - followStatic(classToScala(jclazz.getEnclosingClass), jclazz.getModifiers) - else if (jclazz.isLocalClass) - methodToScala(jclazz.getEnclosingMethod) orElse constrToScala(jclazz.getEnclosingConstructor) - else if (jclazz.isPrimitive || jclazz.isArray) + if (jclazz.isMemberClass) { + val jEnclosingClass = jclazz.getEnclosingClass + val sEnclosingClass = classToScala(jEnclosingClass) + followStatic(sEnclosingClass, jclazz.getModifiers) + } else if (jclazz.isLocalClass) { + val jEnclosingMethod = jclazz.getEnclosingMethod + if (jEnclosingMethod != null) { + methodToScala(jEnclosingMethod) + } else { + val jEnclosingConstructor = jclazz.getEnclosingConstructor + constrToScala(jEnclosingConstructor) + } + } else if (jclazz.isPrimitive || jclazz.isArray) { ScalaPackageClass - else if (jclazz.getPackage != null) - packageToScala(jclazz.getPackage) - else + } else if (jclazz.getPackage != null) { + val jPackage = jclazz.getPackage + packageToScala(jPackage) + } else { + // @eb: a weird classloader might return a null package for something with a non-empty package name + // for example, http://groups.google.com/group/scala-internals/browse_thread/thread/7be09ff8f67a1e5c + // in that case we could invoke packageNameToScala(jPackageName) and, probably, be okay + // however, I think, it's better to blow up, since weirdness of the class loader might bite us elsewhere + val jPackageName = jclazz.getName.substring(0, 
Math.max(jclazz.getName.lastIndexOf("."), 0)) + assert(jPackageName == "") EmptyPackageClass + } } /** @@ -295,8 +311,10 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => * @return A Scala method object that corresponds to `jmeth`. */ def methodToScala(jmeth: jMethod): Symbol = methodCache.toScala(jmeth) { - val owner = followStatic(classToScala(jmeth.getDeclaringClass), jmeth.getModifiers) - lookup(owner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth) + val jOwner = jmeth.getDeclaringClass + var sOwner = classToScala(jOwner) + sOwner = followStatic(sOwner, jmeth.getModifiers) + lookup(sOwner, jmeth.getName) suchThat (erasesTo(_, jmeth)) orElse jmethodAsScala(jmeth) } /** @@ -344,6 +362,18 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => pkg.moduleClass } + private def scalaSimpleName(jclazz: jClass[_]): TypeName = { + val owner = sOwner(jclazz) + val enclosingClass = jclazz.getEnclosingClass + var prefix = if (enclosingClass != null) enclosingClass.getName else "" + val isObject = owner.isModuleClass && !owner.isPackageClass + if (isObject && !prefix.endsWith(nme.MODULE_SUFFIX_STRING)) prefix += nme.MODULE_SUFFIX_STRING + assert(jclazz.getName.startsWith(prefix)) + var name = jclazz.getName.substring(prefix.length) + name = name.substring(name.lastIndexOf(".") + 1) + newTypeName(name) + } + /** * The Scala class that corresponds to a given Java class. * @param jclazz The Java class @@ -353,28 +383,54 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => */ def classToScala(jclazz: jClass[_]): Symbol = classCache.toScala(jclazz) { val jname = javaTypeName(jclazz) - def lookup = sOwner(jclazz).info.decl(newTypeName(jclazz.getSimpleName)) - - if (jclazz.isMemberClass && !nme.isImplClassName(jname)) { - val sym = lookup - assert(sym.isType, sym+"/"+jclazz+"/"+sOwner(jclazz)+"/"+jclazz.getSimpleName) - sym.asInstanceOf[ClassSymbol] - } - else if (jclazz.isLocalClass || invalidClassName(jname)) { - // local classes and implementation classes not preserved by unpickling - treat as Java - jclassAsScala(jclazz) - } - else if (jclazz.isArray) { - ArrayClass + val owner = sOwner(jclazz) + val simpleName = scalaSimpleName(jclazz) + + val sym = { + def lookup = { + def coreLookup(name: Name): Symbol = { + val sym = owner.info.decl(name) + sym orElse { + if (name.startsWith(nme.NAME_JOIN_STRING)) + coreLookup(name.subName(1, name.length)) + else + NoSymbol + } + } + + if (nme.isModuleName(simpleName)) { + val moduleName = nme.stripModuleSuffix(simpleName).toTermName + val sym = coreLookup(moduleName) + if (sym == NoSymbol) sym else sym.moduleClass + } else { + coreLookup(simpleName) + } + } + + if (jclazz.isMemberClass && !nme.isImplClassName(jname)) { + lookup + } else if (jclazz.isLocalClass || invalidClassName(jname)) { + // local classes and implementation classes not preserved by unpickling - treat as Java + jclassAsScala(jclazz) + } else if (jclazz.isArray) { + ArrayClass + } else javaTypeToValueClass(jclazz) orElse { + // jclazz is top-level - get signature + lookup + // val (clazz, module) = createClassModule( + // sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _)) + // classCache enter (jclazz, clazz) + // clazz + } } - else javaTypeToValueClass(jclazz) orElse { - // jclazz is top-level - get signature - lookup - // val (clazz, module) = createClassModule( - // sOwner(jclazz), newTypeName(jclazz.getSimpleName), new TopClassCompleter(_, _)) - // classCache enter (jclazz, clazz) - // clazz + + 
if (!sym.isType) { + def msgNoSym = "no symbol could be loaded from %s (scala equivalent is %s) by name %s".format(owner, jclazz, simpleName) + def msgIsNotType = "not a type: symbol %s loaded from %s (scala equivalent is %s) by name %s".format(sym, owner, jclazz, simpleName) + assert(false, if (sym == NoSymbol) msgNoSym else msgIsNotType) } + + sym.asInstanceOf[ClassSymbol] } /** @@ -453,7 +509,7 @@ trait JavaToScala extends ConversionUtil { self: SymbolTable => private def jclassAsScala(jclazz: jClass[_]): Symbol = jclassAsScala(jclazz, sOwner(jclazz)) private def jclassAsScala(jclazz: jClass[_], owner: Symbol): Symbol = { - val name = newTypeName(jclazz.getSimpleName) + val name = scalaSimpleName(jclazz) val completer = (clazz: Symbol, module: Symbol) => new FromJavaClassCompleter(clazz, module, jclazz) val (clazz, module) = createClassModule(owner, name, completer) classCache enter (jclazz, clazz) diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala index 70fa740eeb..3a605975f4 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractFileClassLoader.scala @@ -21,9 +21,6 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) { // private val defined = mutable.Map[String, Class[_]]() - // Widening to public - override def getPackage(name: String) = super.getPackage(name) - override protected def trace = sys.props contains "scala.debug.classloader" @@ -47,6 +44,22 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) } } + protected def dirNameToPath(name: String): String = + name.replace('.', '/') + + protected def findAbstractDir(name: String): AbstractFile = { + var file: AbstractFile = root + val pathParts = dirNameToPath(name) split '/' + + for (dirPart <- pathParts) { + file = file.lookupName(dirPart, true) + if (file == null) + return null + } + + return file + } + override def getResourceAsStream(name: String) = findAbstractFile(name) match { case null => super.getResourceAsStream(name) case file => file.input @@ -78,4 +91,24 @@ class AbstractFileClassLoader(root: AbstractFile, parent: ClassLoader) // case null => super.getResource(name) // case file => new URL(...) 
// } + + private val packages = mutable.Map[String, Package]() + + override def definePackage(name: String, specTitle: String, specVersion: String, specVendor: String, implTitle: String, implVersion: String, implVendor: String, sealBase: URL): Package = { + throw new UnsupportedOperationException() + } + + override def getPackage(name: String): Package = { + findAbstractDir(name) match { + case null => super.getPackage(name) + case file => packages.getOrElseUpdate(name, { + val ctor = classOf[Package].getDeclaredConstructor(classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[String], classOf[URL], classOf[ClassLoader]) + ctor.setAccessible(true) + ctor.newInstance(name, null, null, null, null, null, null, null, this) + }) + } + } + + override def getPackages(): Array[Package] = + root.iterator.filter(_.isDirectory).map(dir => getPackage(dir.name)).toArray } diff --git a/src/compiler/scala/tools/nsc/interpreter/IMain.scala b/src/compiler/scala/tools/nsc/interpreter/IMain.scala index 4ccea8afd6..6ae8d0e7d0 100644 --- a/src/compiler/scala/tools/nsc/interpreter/IMain.scala +++ b/src/compiler/scala/tools/nsc/interpreter/IMain.scala @@ -196,7 +196,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends def foreach[U](f: Tree => U): Unit = t foreach { x => f(x) ; () } }).toList } - + implicit def installReplTypeOps(tp: Type): ReplTypeOps = new ReplTypeOps(tp) class ReplTypeOps(tp: Type) { def orElse(other: => Type): Type = if (tp ne NoType) tp else other @@ -314,26 +314,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends private class TranslatingClassLoader(parent: ClassLoader) extends AbstractFileClassLoader(virtualDirectory, parent) { private[IMain] var traceClassLoading = isReplTrace override protected def trace = super.trace || traceClassLoading - - private val packages = mutable.HashMap[String, Package]() - private def enclosingPackageNames(name: String): List[String] = - (name split '.').inits.toList drop 1 dropRight 1 map (_ mkString ".") reverse - - // Here's what all those params to definePackage are after the package name: - // - // specTitle - The specification title - // specVersion - The specification version - // specVendor - The specification vendor - // implTitle - The implementation title - // implVersion - The implementation version - // implVendor - The implementation vendor - // sealBase - If not null, then this package is sealed with respect to the given code source URL object. Otherwise, the package is not sealed. - private def addPackageNames(name: String) { - enclosingPackageNames(name) filterNot (packages contains _) foreach { p => - packages(p) = definePackage(p, "", "", "", "", "", "", null) - repltrace("Added " + packages(p) + " to repl classloader.") - } - } /** Overridden here to try translating a simple name to the generated * class name if the original attempt fails. 
This method is used by @@ -348,12 +328,6 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends file } } - override def findClass(name: String): JClass = { - val clazz = super.findClass(name) - if (clazz ne null) - addPackageNames(clazz.getName) - clazz - } } private def makeClassLoader(): AbstractFileClassLoader = new TranslatingClassLoader(parentClassLoader match { @@ -1104,7 +1078,7 @@ class IMain(initialSettings: Settings, protected val out: JPrintWriter) extends val clazz = classOfTerm(id) getOrElse { return NoType } val staticSym = tpe.typeSymbol val runtimeSym = getClassIfDefined(clazz.getName) - + if ((runtimeSym != NoSymbol) && (runtimeSym != staticSym) && (runtimeSym isSubClass staticSym)) runtimeSym.info else NoType diff --git a/test/files/run/t5256a.check b/test/files/run/t5256a.check new file mode 100644 index 0000000000..304f4ddd79 --- /dev/null +++ b/test/files/run/t5256a.check @@ -0,0 +1,2 @@ +A +true diff --git a/test/files/run/t5256a.scala b/test/files/run/t5256a.scala new file mode 100644 index 0000000000..05a935c770 --- /dev/null +++ b/test/files/run/t5256a.scala @@ -0,0 +1,9 @@ +import scala.reflect.mirror._ + +class A + +object Test extends App { + val c = classToType(classOf[A]) + println(c) + println(c.typeSymbol == classToSymbol(classOf[A])) +} diff --git a/test/files/run/t5256b.check b/test/files/run/t5256b.check new file mode 100644 index 0000000000..64f4c01166 --- /dev/null +++ b/test/files/run/t5256b.check @@ -0,0 +1,2 @@ +Test.A +true \ No newline at end of file diff --git a/test/files/run/t5256b.scala b/test/files/run/t5256b.scala new file mode 100644 index 0000000000..5575211641 --- /dev/null +++ b/test/files/run/t5256b.scala @@ -0,0 +1,8 @@ +import scala.reflect.mirror._ + +object Test extends App { + class A + val c = classToType(classOf[A]) + println(c) + println(c.typeSymbol == classToSymbol(classOf[A])) +} diff --git a/test/files/run/t5256d.check b/test/files/run/t5256d.check new file mode 100644 index 0000000000..7924c15c5c --- /dev/null +++ b/test/files/run/t5256d.check @@ -0,0 +1,20 @@ +Type in expressions to have them evaluated. +Type :help for more information. 
+ +scala> + +scala> import scala.reflect.mirror._ +import scala.reflect.mirror._ + +scala> class A +defined class A + +scala> val c = classToType(classOf[A]) +c: reflect.mirror.Type = A + +scala> println(c.typeSymbol == classToSymbol(classOf[A])) +true + +scala> + +scala> diff --git a/test/files/run/t5256d.scala b/test/files/run/t5256d.scala new file mode 100644 index 0000000000..86404a9b63 --- /dev/null +++ b/test/files/run/t5256d.scala @@ -0,0 +1,10 @@ +import scala.tools.partest.ReplTest + +object Test extends ReplTest { + def code = """ +import scala.reflect.mirror._ +class A +val c = classToType(classOf[A]) +println(c.typeSymbol == classToSymbol(classOf[A])) + """ +} diff --git a/test/files/run/t5256e.check b/test/files/run/t5256e.check new file mode 100644 index 0000000000..e50f917e14 --- /dev/null +++ b/test/files/run/t5256e.check @@ -0,0 +1,2 @@ +C.this.A +true \ No newline at end of file diff --git a/test/files/run/t5256e.scala b/test/files/run/t5256e.scala new file mode 100644 index 0000000000..9ed422ca44 --- /dev/null +++ b/test/files/run/t5256e.scala @@ -0,0 +1,9 @@ +import scala.reflect.mirror._ + +class C { class A } + +object Test extends App { + val c = classToType(classOf[C#A]) + println(c) + println(c.typeSymbol == classToSymbol(classOf[C#A])) +} diff --git a/test/files/run/t5256f.check b/test/files/run/t5256f.check new file mode 100644 index 0000000000..ad2f375d9a --- /dev/null +++ b/test/files/run/t5256f.check @@ -0,0 +1,4 @@ +Test.A1 +true +Test.this.A2 +true diff --git a/test/files/run/t5256f.scala b/test/files/run/t5256f.scala new file mode 100644 index 0000000000..45c80cbd63 --- /dev/null +++ b/test/files/run/t5256f.scala @@ -0,0 +1,19 @@ +import scala.reflect.mirror._ + +object Test extends App { + class A1 + + val c1 = classToType(classOf[A1]) + println(c1) + println(c1.typeSymbol == classToSymbol(classOf[A1])) + + new Test +} + +class Test { + class A2 + + val c2 = classToType(classOf[A2]) + println(c2) + println(c2.typeSymbol == classToSymbol(classOf[A2])) +} diff --git a/test/pending/run/t5256c.check b/test/pending/run/t5256c.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/t5256c.scala b/test/pending/run/t5256c.scala new file mode 100644 index 0000000000..8ebb51a009 --- /dev/null +++ b/test/pending/run/t5256c.scala @@ -0,0 +1,10 @@ +import scala.reflect.mirror._ + +object Test extends App { + { + class A + val c = classToType(classOf[A]) + println(c) + println(c.typeSymbol == classToSymbol(classOf[A])) + } +} diff --git a/test/pending/run/t5256g.check b/test/pending/run/t5256g.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/t5256g.scala b/test/pending/run/t5256g.scala new file mode 100644 index 0000000000..6158a9281d --- /dev/null +++ b/test/pending/run/t5256g.scala @@ -0,0 +1,11 @@ +import scala.reflect.mirror._ + +class A +trait B + +object Test extends App { + val mutant = new A with B + val c = classToType(mutant.getClass) + println(c) + println(c.typeSymbol == classToSymbol(mutant.getClass)) +} diff --git a/test/pending/run/t5256h.check b/test/pending/run/t5256h.check new file mode 100644 index 0000000000..4f9b8faf71 --- /dev/null +++ b/test/pending/run/t5256h.check @@ -0,0 +1,8 @@ +import scala.reflect.mirror._ + +object Test extends App { + val mutant = new { val x = 2 } + val c = classToType(mutant.getClass) + println(c) + println(c.typeSymbol == classToSymbol(mutant.getClass)) +} diff --git a/test/pending/run/t5256h.scala b/test/pending/run/t5256h.scala new file mode 100644 
index 0000000000..4f9b8faf71 --- /dev/null +++ b/test/pending/run/t5256h.scala @@ -0,0 +1,8 @@ +import scala.reflect.mirror._ + +object Test extends App { + val mutant = new { val x = 2 } + val c = classToType(mutant.getClass) + println(c) + println(c.typeSymbol == classToSymbol(mutant.getClass)) +} -- cgit v1.2.3 From 610027b3c50c6a46b26bcfe71013cebc172c146b Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 31 Jan 2012 15:02:48 +0100 Subject: Hardens reification against rare kinds of Constants Importers now correctly process constants that carry types and symbols. However, it is still impossible to reify classOf for a class/trait that is defined inside a quasiquote. Theoretically, this can be implemented, but will require attaching original trees to classOf constants, which needs much more effort. --- .../scala/reflect/internal/Constants.scala | 2 +- .../scala/reflect/internal/Importers.scala | 17 ++++++++----- .../scala/tools/nsc/transform/LiftCode.scala | 28 +++++++++++++++++++--- test/files/run/t5258a.check | 1 + test/files/run/t5258a.scala | 14 +++++++++++ test/pending/run/t5258b.check | 1 + test/pending/run/t5258b.scala | 15 ++++++++++++ test/pending/run/t5258c.check | 1 + test/pending/run/t5258c.scala | 15 ++++++++++++ 9 files changed, 84 insertions(+), 10 deletions(-) create mode 100644 test/files/run/t5258a.check create mode 100644 test/files/run/t5258a.scala create mode 100644 test/pending/run/t5258b.check create mode 100644 test/pending/run/t5258b.scala create mode 100644 test/pending/run/t5258c.check create mode 100644 test/pending/run/t5258c.scala diff --git a/src/compiler/scala/reflect/internal/Constants.scala b/src/compiler/scala/reflect/internal/Constants.scala index 9c4b2b2245..c328cc49cb 100644 --- a/src/compiler/scala/reflect/internal/Constants.scala +++ b/src/compiler/scala/reflect/internal/Constants.scala @@ -45,7 +45,7 @@ trait Constants extends api.Constants { case x: Char => CharTag case x: Type => ClassTag case x: Symbol => EnumTag - case _ => throw new Error("bad constant value: " + value) + case _ => throw new Error("bad constant value: " + value + " of class " + value.getClass) } def isByteRange: Boolean = isIntRange && Byte.MinValue <= intValue && intValue <= Byte.MaxValue diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index 23b443919a..4f5b28d370 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -145,8 +145,8 @@ trait Importers { self: SymbolTable => PolyType(tparams map importSymbol, importType(restpe)) case from.NullaryMethodType(restpe) => NullaryMethodType(importType(restpe)) - case from.ConstantType(from.Constant(value)) => - ConstantType(Constant(value)) + case from.ConstantType(constant @ from.Constant(_)) => + ConstantType(importConstant(constant)) case from.SuperType(thistpe, supertpe) => SuperType(importType(thistpe), importType(supertpe)) case from.TypeBounds(lo, hi) => @@ -194,8 +194,8 @@ trait Importers { self: SymbolTable => }) def importAnnotArg(arg: from.ClassfileAnnotArg): ClassfileAnnotArg = arg match { - case from.LiteralAnnotArg(from.Constant(value)) => - LiteralAnnotArg(Constant(value)) + case from.LiteralAnnotArg(constant @ from.Constant(_)) => + LiteralAnnotArg(importConstant(constant)) case from.ArrayAnnotArg(args) => ArrayAnnotArg(args map importAnnotArg) case from.ScalaSigBytes(bytes) => @@ -303,8 +303,8 @@ trait Importers { self: SymbolTable => case _ => new Ident(importName(name)) 
} - case from.Literal(from.Constant(value)) => - new Literal(Constant(value)) + case from.Literal(constant @ from.Constant(_)) => + new Literal(importConstant(constant)) case from.TypeTree() => new TypeTree() case from.Annotated(annot, arg) => @@ -339,5 +339,10 @@ trait Importers { self: SymbolTable => def importRefTree(tree: from.RefTree): RefTree = importTree(tree).asInstanceOf[RefTree] def importIdent(tree: from.Ident): Ident = importTree(tree).asInstanceOf[Ident] def importCaseDef(tree: from.CaseDef): CaseDef = importTree(tree).asInstanceOf[CaseDef] + def importConstant(constant: from.Constant): Constant = new Constant(constant.tag match { + case ClassTag => importType(constant.value.asInstanceOf[from.Type]) + case EnumTag => importSymbol(constant.value.asInstanceOf[from.Symbol]) + case _ => constant.value + }) } } diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala index c5475fa0f2..f1182fc2a9 100644 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala @@ -129,7 +129,13 @@ abstract class LiftCode extends Transform with TypingTransformers { if (reifyCopypaste) printCopypaste(result) result } - } finally printTypings = saved + } catch { + case ex: ReifierError => + unit.error(ex.pos, ex.msg) + tree + } finally { + printTypings = saved + } case _ => super.transform(tree) } @@ -396,6 +402,10 @@ abstract class LiftCode extends Transform with TypingTransformers { if (thereAreOnlyTTs && ttsAreNotEssential) reifyTree(hk) else reifyProduct(ta) case global.emptyValDef => mirrorSelect(nme.emptyValDef) + case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) => + CannotReifyClassOfBoundType(tree, tpe) + case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym => + CannotReifyClassOfBoundEnum(tree, constant.tpe) case _ => if (tree.isDef) boundSyms += tree.symbol @@ -494,8 +504,20 @@ abstract class LiftCode extends Transform with TypingTransformers { symDefs.toList ++ fillIns.toList } + } + + /** A throwable signalling a reification error */ + class ReifierError(var pos: Position, val msg: String) extends Throwable(msg) { + def this(msg: String) = this(NoPosition, msg) + } + + def CannotReifyClassOfBoundType(tree: Tree, tpe: Type) = { + val msg = "cannot reify classOf[%s] which refers to a type declared inside the block being reified".format(tpe) + throw new ReifierError(tree.pos, msg) + } - private def cannotReify(value: Any): Nothing = - abort("don't know how to reify " + value + " of " + value.getClass) + def CannotReifyClassOfBoundEnum(tree: Tree, tpe: Type) = { + val msg = "cannot reify classOf[%s] which refers to an enum declared inside the block being reified".format(tpe) + throw new ReifierError(tree.pos, msg) } } diff --git a/test/files/run/t5258a.check b/test/files/run/t5258a.check new file mode 100644 index 0000000000..4e0b2da04c --- /dev/null +++ b/test/files/run/t5258a.check @@ -0,0 +1 @@ +int \ No newline at end of file diff --git a/test/files/run/t5258a.scala b/test/files/run/t5258a.scala new file mode 100644 index 0000000000..deabb8310f --- /dev/null +++ b/test/files/run/t5258a.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + println(classOf[Int]) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = 
toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} \ No newline at end of file diff --git a/test/pending/run/t5258b.check b/test/pending/run/t5258b.check new file mode 100644 index 0000000000..283b4225fb --- /dev/null +++ b/test/pending/run/t5258b.check @@ -0,0 +1 @@ +TBI \ No newline at end of file diff --git a/test/pending/run/t5258b.scala b/test/pending/run/t5258b.scala new file mode 100644 index 0000000000..70cb4a7f4e --- /dev/null +++ b/test/pending/run/t5258b.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C + println(classOf[C]) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} \ No newline at end of file diff --git a/test/pending/run/t5258c.check b/test/pending/run/t5258c.check new file mode 100644 index 0000000000..283b4225fb --- /dev/null +++ b/test/pending/run/t5258c.check @@ -0,0 +1 @@ +TBI \ No newline at end of file diff --git a/test/pending/run/t5258c.scala b/test/pending/run/t5258c.scala new file mode 100644 index 0000000000..a93170d0d6 --- /dev/null +++ b/test/pending/run/t5258c.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object E extends Enumeration { val foo, bar = Value } + println(E.foo) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} \ No newline at end of file -- cgit v1.2.3 From 4a083558bd20a46b0b29ed0b5b5ec7a0a1f29888 Mon Sep 17 00:00:00 2001 From: Hubert Plociniczak Date: Thu, 2 Feb 2012 12:06:34 +0100 Subject: Fix sbt build with trunk. This was tricky to find as HLists and multiple chains of implicits are definitely not fun to debug. Reporting ambiguous errors is influenced by the general error reporting, don't look for implicit arguments if any of the preceding ones failed (kills performance, causes diverging implicits with HLists). Previously throwing type errors handled that correctly but now we don't do that. Fixed small but essential typo when typing implicit. 
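A minimal, self-contained model of the change described above (all names below are stand-ins for the typer internals, not the real compiler API): once one implicit parameter fails to resolve, the remaining parameters are not searched at all, so a single missing implicit can no longer trigger further expensive or diverging searches.

    object ImplicitArgsSketch extends App {
      sealed trait SearchResult
      case object SearchFailure extends SearchResult
      case class SearchSuccess(tree: String) extends SearchResult

      // stand-in for a potentially expensive or diverging implicit search
      def inferImplicit(paramType: String): SearchResult = paramType match {
        case "Missing"   => SearchFailure                  // no implicit in scope
        case "Diverging" => sys.error("diverging search")  // would blow up if attempted
        case other       => SearchSuccess("impl[" + other + "]")
      }

      def applyImplicitArgs(paramTypes: List[String]): List[SearchResult] = {
        var paramFailed = false
        paramTypes map { tp =>
          // after the first failure, later parameters skip the search entirely
          val res = if (paramFailed) SearchFailure else inferImplicit(tp)
          if (res == SearchFailure) paramFailed = true
          res
        }
      }

      // "Diverging" is never searched because "Missing" already failed:
      // List(SearchSuccess(impl[Ordering[Int]]), SearchFailure, SearchFailure)
      println(applyImplicitArgs(List("Ordering[Int]", "Missing", "Diverging")))
    }
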
Review by @dragos --- src/compiler/scala/tools/nsc/typechecker/Implicits.scala | 2 +- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 12 +++++++++--- 2 files changed, 10 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index eaf1b1ffbc..036e7fc750 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -625,7 +625,7 @@ trait Implicits { if (context.hasErrors) fail("typing TypeApply reported errors for the implicit tree") else { - val result = new SearchResult(checked, subst) + val result = new SearchResult(itree2, subst) incCounter(foundImplicits) printInference("[success] found %s for pt %s".format(result, ptInstantiated)) result diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d3ff331f98..a90067a56c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -100,6 +100,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case MethodType(params, _) => val argResultsBuff = new ListBuffer[SearchResult]() val argBuff = new ListBuffer[Tree]() + var paramFailed = false def mkPositionalArg(argTree: Tree, paramName: Name) = argTree def mkNamedArg(argTree: Tree, paramName: Name) = atPos(argTree.pos)(new AssignOrNamedArg(Ident(paramName), (argTree))) @@ -114,14 +115,14 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { for(ar <- argResultsBuff) paramTp = paramTp.subst(ar.subst.from, ar.subst.to) - val res = inferImplicit(fun, paramTp, true, false, context) + val res = if (paramFailed) SearchFailure else inferImplicit(fun, paramTp, context.reportErrors, false, context) argResultsBuff += res if (res != SearchFailure) { argBuff += mkArg(res.tree, param.name) } else { mkArg = mkNamedArg // don't pass the default argument (if any) here, but start emitting named arguments for the following args - if (!param.hasDefault) { + if (!param.hasDefault && !paramFailed) { context.errBuffer.find(_.kind == ErrorKinds.Divergent) match { case Some(divergentImplicit) => // DivergentImplicit error has higher priority than "no implicit found" @@ -133,6 +134,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { case None => NoImplicitFoundError(fun, param) } + paramFailed = true } /* else { TODO: alternative (to expose implicit search failure more) --> @@ -767,7 +769,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { withCondConstrTyper(treeInfo.isSelfOrSuperConstrCall(tree)){ typer1 => if (original != EmptyTree && pt != WildcardType) - typer1.silent(tpr => tpr.typed(tpr.applyImplicitArgs(tree), mode, pt)) match { + typer1.silent(tpr => { + val withImplicitArgs = tpr.applyImplicitArgs(tree) + if (tpr.context.hasErrors) tree // silent will wrap it in SilentTypeError anyway + else tpr.typed(withImplicitArgs, mode, pt) + }) match { case SilentResultValue(result) => result case _ => -- cgit v1.2.3 From c3d19c58d8a94b7232718321f6994c001257cc96 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Thu, 2 Feb 2012 14:05:26 +0100 Subject: Incorporate Ctrie into standard library. Implemented Ctrie serialization. Improved hashcode computation. 
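The improved hash computation mixes the bits of the key's hashCode before the trie uses them for indexing; since a hash array mapped trie selects a branch from only a few bits of the hash at each level, poor bit dispersion in hashCode translates directly into collisions and long chains. The constants and steps below are exactly those added as Ctrie.computeHash in this patch, pulled out here only so they can be tried standalone. Serialization, likewise, simply writes the key/value pairs followed by an end marker and rebuilds the trie by re-inserting them on read.

    // Bit-spreading hash as introduced in Ctrie.computeHash below.
    def computeHash[K](k: K): Int = {
      var hcode = k.hashCode
      hcode = hcode * 0x9e3775cd                    // multiply to mix low bits upward
      hcode = java.lang.Integer.reverseBytes(hcode) // swap byte order to spread them back
      hcode * 0x9e3775cd                            // and mix once more
    }

    // Keys with very similar hash codes now get thoroughly different hashes:
    (0 to 3) foreach (i => println(computeHash(i).toHexString))
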
--- src/library/scala/collection/mutable/Ctrie.scala | 103 ++++++++++++++++++++--- test/files/jvm/serialization.check | 4 + test/files/jvm/serialization.scala | 7 +- test/files/run/ctries/lnode.scala | 5 +- 4 files changed, 106 insertions(+), 13 deletions(-) diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index d02e0ce178..84cceb44eb 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -6,12 +6,14 @@ ** |/ ** \* */ -package scala.collection.mutable +package scala.collection +package mutable import java.util.concurrent.atomic._ import collection.immutable.{ ListMap => ImmutableListMap } +import generic._ import annotation.tailrec import annotation.switch @@ -425,7 +427,7 @@ extends MainNode[K, V] { if (updmap.size > 1) new LNode(updmap) else { val (k, v) = updmap.iterator.next - new TNode(k, v, k.hashCode) // create it tombed so that it gets compressed on subsequent accesses + new TNode(k, v, Ctrie.computeHash(k)) // create it tombed so that it gets compressed on subsequent accesses } } def get(k: K) = listmap.get(k) @@ -568,10 +570,26 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai } -class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef]) +/** A concurrent hash-trie or Ctrie is a concurrent thread-safe lock-free + * implementation of a hash array mapped trie. It is used to implement the + * concurrent map abstraction. It has particularly scalable concurrent insert + * and remove operations and is memory-efficient. It supports O(1), atomic, + * lock-free snapshots which are used to implement linearizable lock-free size, + * iterator and clear operations. The cost of evaluating the (lazy) snapshot is + * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. 
+ * + * @author Aleksandar Prokopec + * @since 2.10 + */ +@SerialVersionUID(0L - 6402774413839597105L) +final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef]) extends ConcurrentMap[K, V] + with MapLike[K, V, Ctrie[K, V]] + with Serializable { - private val rootupdater = rtupd + import Ctrie.computeHash + + private var rootupdater = rtupd @volatile var root = r def this() = this( @@ -581,6 +599,31 @@ extends ConcurrentMap[K, V] /* internal methods */ + private def writeObject(out: java.io.ObjectOutputStream) { + val it = iterator + while (it.hasNext) { + val (k, v) = it.next() + out.writeObject(k) + out.writeObject(v) + } + out.writeObject(CtrieSerializationEnd) + } + + private def readObject(in: java.io.ObjectInputStream) { + root = INode.newRootNode + rootupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[Ctrie[K, V]], classOf[AnyRef], "root") + + var obj: AnyRef = null + do { + obj = in.readObject() + if (obj != CtrieSerializationEnd) { + val k = obj.asInstanceOf[K] + val v = in.readObject().asInstanceOf[V] + update(k, v) + } + } while (obj != CtrieSerializationEnd) + } + @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { @@ -623,10 +666,6 @@ extends ConcurrentMap[K, V] } else false } - @inline private def computeHash(k: K): Int = { - k.hashCode - } - @tailrec private def inserthc(k: K, hc: Int, v: V) { val r = RDCSS_READ_ROOT() if (!r.rec_insert(k, v, hc, 0, null, r.gen, this)) inserthc(k, hc, v) @@ -647,7 +686,7 @@ extends ConcurrentMap[K, V] else res } - /* + /* slower: //@tailrec private def lookuphc(k: K, hc: Int): AnyRef = { val r = RDCSS_READ_ROOT() @@ -671,10 +710,21 @@ extends ConcurrentMap[K, V] /* public methods */ + override def empty: Ctrie[K, V] = new Ctrie[K, V] + @inline final def isReadOnly = rootupdater eq null @inline final def nonReadOnly = rootupdater ne null + /** Returns a snapshot of this Ctrie. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * in the snapshot or this Ctrie are accessed, they are rewritten. + * This means that the work of rebuilding both the snapshot and this + * Ctrie is distributed across all the threads doing updates or accesses + * subsequent to the snapshot creation. + */ @tailrec final def snapshot(): Ctrie[K, V] = { val r = RDCSS_READ_ROOT() val expmain = r.GCAS_READ(this) @@ -682,6 +732,18 @@ extends ConcurrentMap[K, V] else snapshot() } + /** Returns a read-only snapshot of this Ctrie. + * This operation is lock-free and linearizable. + * + * The snapshot is lazily updated - the first time some branch + * of this Ctrie are accessed, it is rewritten. The work of creating + * the snapshot is thus distributed across subsequent updates + * and accesses on this Ctrie by all threads. + * Note that the snapshot itself is never rewritten unlike when calling + * the `snapshot` method, but the obtained snapshot cannot be modified. + * + * This method is used by other methods such as `size` and `iterator`. 
+ */ @tailrec final def readOnlySnapshot(): collection.Map[K, V] = { val r = RDCSS_READ_ROOT() val expmain = r.GCAS_READ(this) @@ -760,11 +822,25 @@ extends ConcurrentMap[K, V] if (nonReadOnly) readOnlySnapshot().iterator else new CtrieIterator(this) + override def stringPrefix = "Ctrie" + } -object Ctrie { - val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[AnyRef], "mainnode") +object Ctrie extends MutableMapFactory[Ctrie] { + val inodeupdater = AtomicReferenceFieldUpdater.newUpdater(classOf[INodeBase[_, _]], classOf[MainNode[_, _]], "mainnode") + + implicit def canBuildFrom[K, V]: CanBuildFrom[Coll, (K, V), Ctrie[K, V]] = new MapCanBuildFrom[K, V] + + def empty[K, V]: Ctrie[K, V] = new Ctrie[K, V] + + @inline final def computeHash[K](k: K): Int = { + var hcode = k.hashCode + hcode = hcode * 0x9e3775cd + hcode = java.lang.Integer.reverseBytes(hcode) + hcode * 0x9e3775cd + } + } @@ -877,6 +953,11 @@ private[mutable] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = private[mutable] object RestartException extends util.control.ControlThrowable +/** Only used for ctrie serialization. */ +@SerialVersionUID(0L - 7237891413820527142L) +private[mutable] case object CtrieSerializationEnd + + private[mutable] object Debug { import collection._ diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index f58f763a76..cdfc100e0d 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -192,6 +192,10 @@ x = TreeSet(1, 2, 3) y = TreeSet(1, 2, 3) x equals y: true, y equals x: true +x = Ctrie(1 -> one, 2 -> two, 3 -> three) +y = Ctrie(1 -> one, 2 -> two, 3 -> three) +x equals y: true, y equals x: true + x = xml:src="hello" y = xml:src="hello" x equals y: true, y equals x: true diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala index 73bed2d46b..4e1ff368ab 100644 --- a/test/files/jvm/serialization.scala +++ b/test/files/jvm/serialization.scala @@ -286,7 +286,7 @@ object Test3_mutable { import scala.collection.mutable.{ ArrayBuffer, ArrayBuilder, ArraySeq, ArrayStack, BitSet, DoubleLinkedList, HashMap, HashSet, History, LinkedList, ListBuffer, Publisher, Queue, - Stack, StringBuilder, WrappedArray, TreeSet} + Stack, StringBuilder, WrappedArray, TreeSet, Ctrie} // in alphabetic order try { @@ -385,6 +385,11 @@ object Test3_mutable { val ts1 = TreeSet[Int]() ++= Array(1, 2, 3) val _ts1: TreeSet[Int] = read(write(ts1)) check(ts1, _ts1) + + // Ctrie + val ct1 = Ctrie[Int, String]() ++= Array(1 -> "one", 2 -> "two", 3 -> "three") + val _ct1: Ctrie[Int, String] = read(write(ct1)) + check(ct1, _ct1) } catch { case e: Exception => diff --git a/test/files/run/ctries/lnode.scala b/test/files/run/ctries/lnode.scala index 28da4cc62f..88cbeed1f6 100644 --- a/test/files/run/ctries/lnode.scala +++ b/test/files/run/ctries/lnode.scala @@ -25,7 +25,10 @@ object LNodeSpec extends Spec { "remove elements with the same hash codes" in { val ct = new Ctrie[DumbHash, Int] for (i <- 0 until initsz) ct.update(new DumbHash(i), i) - for (i <- 0 until initsz) assert(ct.remove(new DumbHash(i)) == Some(i)) + for (i <- 0 until initsz) { + val remelem = ct.remove(new DumbHash(i)) + assert(remelem == Some(i), "removing " + i + " yields " + remelem) + } for (i <- 0 until initsz) assert(ct.get(new DumbHash(i)) == None) } -- cgit v1.2.3 From d940371bd50098c4146e52941880ccdbcb4ea47a Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 2 Feb 2012 14:39:44 +0100 Subject: Miscellaneous 
fixes to reification More specifically: * Importers now preserve wasEmpty and original * ToolBoxes no longer auto-evaluate nullary functions returned by runExpr * All local symbols from previous typechecks are now correctly erased by ResetAttrs * Originals are now reified --- .../scala/reflect/internal/Importers.scala | 15 ++- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 49 ++++++---- src/compiler/scala/tools/nsc/ast/Trees.scala | 107 ++++++++++++--------- .../scala/tools/nsc/settings/ScalaSettings.scala | 4 +- .../scala/tools/nsc/transform/LiftCode.scala | 62 +++++++++--- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- .../scala/reflect/api/StandardDefinitions.scala | 7 +- .../scala/tools/partest/utils/CodeTest.scala | 10 +- test/files/run/code.check | 7 ++ test/files/run/reify_complex.check | 1 + test/files/run/reify_complex.scala | 31 ++++++ test/files/run/reify_extendbuiltins.check | 1 + test/files/run/reify_extendbuiltins.scala | 21 ++++ test/files/run/reify_generic2.check | 1 + test/files/run/reify_generic2.scala | 16 +++ test/files/run/reify_getter.check | 1 + test/files/run/reify_getter.scala | 19 ++++ test/files/run/reify_sort1.check | 2 + test/files/run/reify_sort1.scala | 27 ++++++ test/files/run/t5269.check | 1 + test/files/run/t5269.scala | 22 +++++ test/files/run/t5274_1.check | 3 + test/files/run/t5274_1.scala | 20 ++++ test/files/run/t5275.check | 1 + test/files/run/t5275.scala | 15 +++ test/files/run/t5277_1.check | 1 + test/files/run/t5277_1.scala | 21 ++++ test/files/run/t5277_2.check | 2 + test/files/run/t5277_2.scala | 18 ++++ test/files/run/t5335.check | 1 + test/files/run/t5335.scala | 14 +++ test/pending/run/reify_closure6.check | 6 +- test/pending/run/reify_closure6.scala | 2 + test/pending/run/reify_closure7.check | 6 ++ test/pending/run/reify_closure7.scala | 32 ++++++ test/pending/run/reify_closure8a.check | 1 + test/pending/run/reify_closure8a.scala | 17 ++++ test/pending/run/reify_closure8b.check | 1 + test/pending/run/reify_closure8b.scala | 17 ++++ test/pending/run/reify_closure9a.check | 1 + test/pending/run/reify_closure9a.scala | 20 ++++ test/pending/run/reify_closure9b.check | 1 + test/pending/run/reify_closure9b.scala | 20 ++++ test/pending/run/reify_closures10.check | 2 + test/pending/run/reify_closures10.scala | 15 +++ test/pending/run/reify_closures11.check | 1 + test/pending/run/reify_closures11.scala | 18 ++++ test/pending/run/reify_complex.check | 1 - test/pending/run/reify_complex.scala | 31 ------ test/pending/run/reify_extendbuiltins.check | 1 - test/pending/run/reify_extendbuiltins.scala | 21 ---- test/pending/run/reify_sort1.check | 2 - test/pending/run/reify_sort1.scala | 27 ------ test/pending/run/reify_this.check | 5 + test/pending/run/reify_this.scala | 31 ++++++ test/pending/run/t5269.check | 1 - test/pending/run/t5269.scala | 22 ----- test/pending/run/t5274_1.check | 3 - test/pending/run/t5274_1.scala | 20 ---- test/pending/run/t5275.check | 1 - test/pending/run/t5275.scala | 15 --- test/pending/run/t5277_1.check | 1 - test/pending/run/t5277_1.scala | 21 ---- test/pending/run/t5277_2.check | 2 - test/pending/run/t5277_2.scala | 18 ---- test/pending/run/t5415.check | 0 test/pending/run/t5415.scala | 14 +++ 67 files changed, 625 insertions(+), 274 deletions(-) create mode 100644 test/files/run/reify_complex.check create mode 100644 test/files/run/reify_complex.scala create mode 100644 test/files/run/reify_extendbuiltins.check create mode 100644 test/files/run/reify_extendbuiltins.scala create mode 100644 
test/files/run/reify_generic2.check create mode 100644 test/files/run/reify_generic2.scala create mode 100644 test/files/run/reify_getter.check create mode 100644 test/files/run/reify_getter.scala create mode 100644 test/files/run/reify_sort1.check create mode 100644 test/files/run/reify_sort1.scala create mode 100644 test/files/run/t5269.check create mode 100644 test/files/run/t5269.scala create mode 100644 test/files/run/t5274_1.check create mode 100644 test/files/run/t5274_1.scala create mode 100644 test/files/run/t5275.check create mode 100644 test/files/run/t5275.scala create mode 100644 test/files/run/t5277_1.check create mode 100644 test/files/run/t5277_1.scala create mode 100644 test/files/run/t5277_2.check create mode 100644 test/files/run/t5277_2.scala create mode 100644 test/files/run/t5335.check create mode 100644 test/files/run/t5335.scala create mode 100644 test/pending/run/reify_closure7.check create mode 100644 test/pending/run/reify_closure7.scala create mode 100644 test/pending/run/reify_closure8a.check create mode 100644 test/pending/run/reify_closure8a.scala create mode 100644 test/pending/run/reify_closure8b.check create mode 100644 test/pending/run/reify_closure8b.scala create mode 100644 test/pending/run/reify_closure9a.check create mode 100644 test/pending/run/reify_closure9a.scala create mode 100644 test/pending/run/reify_closure9b.check create mode 100644 test/pending/run/reify_closure9b.scala create mode 100644 test/pending/run/reify_closures10.check create mode 100644 test/pending/run/reify_closures10.scala create mode 100644 test/pending/run/reify_closures11.check create mode 100644 test/pending/run/reify_closures11.scala delete mode 100644 test/pending/run/reify_complex.check delete mode 100644 test/pending/run/reify_complex.scala delete mode 100644 test/pending/run/reify_extendbuiltins.check delete mode 100644 test/pending/run/reify_extendbuiltins.scala delete mode 100644 test/pending/run/reify_sort1.check delete mode 100644 test/pending/run/reify_sort1.scala create mode 100644 test/pending/run/reify_this.check create mode 100644 test/pending/run/reify_this.scala delete mode 100644 test/pending/run/t5269.check delete mode 100644 test/pending/run/t5269.scala delete mode 100644 test/pending/run/t5274_1.check delete mode 100644 test/pending/run/t5274_1.scala delete mode 100644 test/pending/run/t5275.check delete mode 100644 test/pending/run/t5275.scala delete mode 100644 test/pending/run/t5277_1.check delete mode 100644 test/pending/run/t5277_1.scala delete mode 100644 test/pending/run/t5277_2.check delete mode 100644 test/pending/run/t5277_2.scala create mode 100644 test/pending/run/t5415.check create mode 100644 test/pending/run/t5415.scala diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index 4f5b28d370..6c843e6f15 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -327,8 +327,19 @@ trait Importers { self: SymbolTable => null } if (mytree != null) { - if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol) - mytree.tpe = importType(tree.tpe) + val mysym = if (tree hasSymbol) importSymbol(tree.symbol) else NoSymbol + val mytpe = importType(tree.tpe) + + mytree match { + case mytt: TypeTree => + val tt = tree.asInstanceOf[from.TypeTree] + if (mytree hasSymbol) mytt.symbol = mysym + if (tt.wasEmpty) mytt.defineType(mytpe) else mytt.setType(mytpe) + if (tt.original != null) 
mytt.setOriginal(importTree(tt.original)) + case _ => + if (mytree hasSymbol) mytree.symbol = importSymbol(tree.symbol) + mytree.tpe = importType(tree.tpe) + } } mytree } diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 46d890c5d1..6e671ae06e 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -44,17 +44,19 @@ trait ToolBoxes extends { self: Universe => // !!! Why is this is in the empty package? If it's only to make // it inaccessible then please put it somewhere designed for that // rather than polluting the empty package with synthetics. + trace("typing: ")(showAttributed(tree)) val ownerClass = EmptyPackageClass.newClassWithInfo(newTypeName(""), List(ObjectClass.tpe), newScope) val owner = ownerClass.newLocalDummy(tree.pos) - - typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt) + val ttree = typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt) + trace("typed: ")(showAttributed(ttree)) + ttree } - + def defOwner(tree: Tree): Symbol = tree find (_.isDef) map (_.symbol) match { case Some(sym) if sym != null && sym != NoSymbol => sym.owner case _ => NoSymbol } - + def wrapInObject(expr: Tree, fvs: List[Symbol]): ModuleDef = { val obj = EmptyPackageClass.newModule(nextWrapperModuleName()) val minfo = ClassInfoType(List(ObjectClass.tpe, ScalaObjectClass.tpe), newScope, obj.moduleClass) @@ -66,9 +68,7 @@ trait ToolBoxes extends { self: Universe => minfo.decls enter meth trace("wrapping ")(defOwner(expr) -> meth) val methdef = DefDef(meth, expr changeOwner (defOwner(expr) -> meth)) - trace("wrapped: ")(showAttributed(methdef)) - resetAllAttrs( - ModuleDef( + val moduledef = ModuleDef( obj, Template( List(TypeTree(ObjectClass.tpe)), @@ -77,7 +77,11 @@ trait ToolBoxes extends { self: Universe => List(), List(List()), List(methdef), - NoPosition))) + NoPosition)) + trace("wrapped: ")(showAttributed(moduledef)) + val cleanedUp = resetLocalAttrs(moduledef) + trace("cleaned up: ")(showAttributed(cleanedUp)) + cleanedUp } def wrapInPackage(clazz: Tree): PackageDef = @@ -91,7 +95,7 @@ trait ToolBoxes extends { self: Universe => def compileExpr(expr: Tree, fvs: List[Symbol]): String = { val mdef = wrapInObject(expr, fvs) - val pdef = trace("wrapped: ")(wrapInPackage(mdef)) + val pdef = wrapInPackage(mdef) val unit = wrapInCompilationUnit(pdef) val run = new Run run.compileUnits(List(unit), run.namerPhase) @@ -104,24 +108,27 @@ trait ToolBoxes extends { self: Universe => def runExpr(expr: Tree): Any = { val etpe = expr.tpe val fvs = (expr filter isFree map (_.symbol)).distinct - + reporter.reset() val className = compileExpr(expr, fvs) if (reporter.hasErrors) { throw new Error("reflective compilation has failed") } - + if (settings.debug.value) println("generated: "+className) val jclazz = jClass.forName(moduleFileName(className), true, classLoader) val jmeth = jclazz.getDeclaredMethods.find(_.getName == wrapperMethodName).get val jfield = jclazz.getDeclaredFields.find(_.getName == NameTransformer.MODULE_INSTANCE_NAME).get val singleton = jfield.get(null) - val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*) - if (etpe.typeSymbol != FunctionClass(0)) result - else { - val applyMeth = result.getClass.getMethod("apply") - applyMeth.invoke(result) - } + // @odersky writes: Not sure we will be able to drop this. 
I forgot the reason why we dereference () functions, + // but there must have been one. So I propose to leave old version in comments to be resurrected if the problem resurfaces. +// val result = jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*) +// if (etpe.typeSymbol != FunctionClass(0)) result +// else { +// val applyMeth = result.getClass.getMethod("apply") +// applyMeth.invoke(result) +// } + jmeth.invoke(singleton, fvs map (sym => sym.asInstanceOf[FreeVar].value.asInstanceOf[AnyRef]): _*) } def showAttributed(tree: Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = { @@ -131,7 +138,7 @@ trait ToolBoxes extends { self: Universe => try { settings.printtypes.value = printTypes settings.uniqid.value = printIds - settings.uniqid.value = printKinds + settings.Yshowsymkinds.value = printKinds tree.toString } finally { settings.printtypes.value = saved1 @@ -167,7 +174,7 @@ trait ToolBoxes extends { self: Universe => lazy val exporter = importer.reverse lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader) - + private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = { // need to establish a run an phase because otherwise we run into an assertion in TypeHistory // that states that the period must be different from NoPeriod @@ -189,8 +196,8 @@ trait ToolBoxes extends { self: Universe => def typeCheck(tree: rm.Tree): rm.Tree = typeCheck(tree, WildcardType.asInstanceOf[rm.Type]) - def showAttributed(tree: rm.Tree): String = - compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree])) + def showAttributed(tree: rm.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = + compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]), printTypes, printIds, printKinds) def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = { val ttree = importAndTypeCheck(tree, expectedType) diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index c80b07c44d..83b6252b26 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -42,8 +42,8 @@ trait Trees extends reflect.internal.Trees { self: Global => /** emitted by typer, eliminated by refchecks */ case class TypeTreeWithDeferredRefCheck()(val check: () => TypeTree) extends TypTree - - /** Marks underlying reference to id as boxed. + + /** Marks underlying reference to id as boxed. * @pre: id must refer to a captured variable * A reference such marked will refer to the boxed entity, no dereferencing * with `.elem` is done on it. 
@@ -208,7 +208,7 @@ trait Trees extends reflect.internal.Trees { self: Global => case _ => this.treeCopy.SelectFromArray(tree, qualifier, selector, erasure) } def ReferenceToBoxed(tree: Tree, idt: Ident) = tree match { - case t @ ReferenceToBoxed(idt0) + case t @ ReferenceToBoxed(idt0) if (idt0 == idt) => t case _ => this.treeCopy.ReferenceToBoxed(tree, idt) } @@ -251,62 +251,79 @@ trait Trees extends reflect.internal.Trees { self: Global => } } - /** resets symbol and tpe fields in a tree, @see ResetAttrsTraverse + /** resets symbol and tpe fields in a tree, @see ResetAttrs */ // def resetAllAttrs[A<:Tree](x:A): A = { new ResetAttrsTraverser().traverse(x); x } // def resetLocalAttrs[A<:Tree](x:A): A = { new ResetLocalAttrsTraverser().traverse(x); x } - - def resetAllAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(false).transformPoly(x) - def resetLocalAttrs[A<:Tree](x:A): A = new ResetAttrsTransformer(true).transformPoly(x) + + def resetAllAttrs[A<:Tree](x:A): A = new ResetAttrs(false).transform(x) + def resetLocalAttrs[A<:Tree](x:A): A = new ResetAttrs(true).transform(x) /** A transformer which resets symbol and tpe fields of all nodes in a given tree, * with special treatment of: * TypeTree nodes: are replaced by their original if it exists, otherwise tpe field is reset * to empty if it started out empty or refers to local symbols (which are erased). * TypeApply nodes: are deleted if type arguments end up reverted to empty - * This(pkg) notes where pkg is a pckage: these are kept. + * This(pkg) nodes where pkg is a package: these are kept. * - * (bq:) This traverser has mutable state and should be discarded after use + * (bq:) This transformer has mutable state and should be discarded after use */ - private class ResetAttrsTransformer(localOnly: Boolean) extends Transformer { - private val erasedSyms = util.HashSet[Symbol](8) - private def resetDef(tree: Tree) { - if (tree.symbol != null && tree.symbol != NoSymbol) - erasedSyms addEntry tree.symbol - tree.symbol = NoSymbol + private class ResetAttrs(localOnly: Boolean) { + val locals = util.HashSet[Symbol](8) + + class MarkLocals extends self.Traverser { + def markLocal(tree: Tree) = + if (tree.symbol != null && tree.symbol != NoSymbol) + locals addEntry tree.symbol + + override def traverse(tree: Tree) = { + tree match { + case _: DefTree | Function(_, _) | Template(_, _, _) => + markLocal(tree) + case _ if tree.symbol.isInstanceOf[FreeVar] => + markLocal(tree) + case _ => + ; + } + + super.traverse(tree) + } } - override def transform(tree: Tree): Tree = super.transform { - tree match { - case Template(_, _, body) => - body foreach resetDef - resetDef(tree) - tree.tpe = null - tree - case _: DefTree | Function(_, _) | Template(_, _, _) => - resetDef(tree) - tree.tpe = null - tree - case tpt: TypeTree => - if (tpt.original != null) - tpt.original - else if (tpt.tpe != null && (tpt.wasEmpty || (tpt.tpe exists (tp => erasedSyms contains tp.typeSymbol)))) - tpt.tpe = null - tree - case TypeApply(fn, args) if args map transform exists (_.isEmpty) => - fn - case This(_) if tree.symbol != null && tree.symbol.isPackageClass => - tree - case EmptyTree => - tree - case _ => - if (tree.hasSymbol && (!localOnly || (erasedSyms contains tree.symbol))) - tree.symbol = NoSymbol - tree.tpe = null - tree + + class Transformer extends self.Transformer { + override def transform(tree: Tree): Tree = super.transform { + tree match { + case tpt: TypeTree => + if (tpt.original != null) { + transform(tpt.original) + } else { + if (tpt.tpe != null && 
(tpt.wasEmpty || (tpt.tpe exists (tp => locals contains tp.typeSymbol)))) + tpt.tpe = null + tree + } + case TypeApply(fn, args) if args map transform exists (_.isEmpty) => + transform(fn) + case This(_) if tree.symbol != null && tree.symbol.isPackageClass => + tree + case EmptyTree => + tree + case _ => + if (tree.hasSymbol && (!localOnly || (locals contains tree.symbol))) + tree.symbol = NoSymbol + tree.tpe = null + tree + } } } - def transformPoly[T <: Tree](x: T): T = { - val x1 = transform(x) + + def transform[T <: Tree](x: T): T = { + new MarkLocals().traverse(x) + + val trace = scala.tools.nsc.util.trace when settings.debug.value + val eoln = System.getProperty("line.separator") + trace("locals (%d total): %n".format(locals.size))(locals.toList map {" " + _} mkString eoln) + + val x1 = new Transformer().transform(x) assert(x.getClass isInstance x1) x1.asInstanceOf[T] } diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index 107ffc35c6..d1ce460eb9 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -166,7 +166,9 @@ trait ScalaSettings extends AbsScalaSettings val Ypmatdebug = BooleanSetting ("-Ypmat-debug", "Trace all pattern matcher activity.") val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") - val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification actions.") + val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification.") + val Yreifytyperdebug + = BooleanSetting ("-Yreifytyper-debug", "Trace typings of reified trees.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") . 
withPostSetHook(_ => interpreter.replProps.debug setValue true) diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala index f1182fc2a9..197a52f011 100644 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala @@ -55,10 +55,16 @@ abstract class LiftCode extends Transform with TypingTransformers { class Codifier(unit: CompilationUnit) extends TypingTransformer(unit) { val reifyDebug = settings.Yreifydebug.value + val reifyTyperDebug = settings.Yreifytyperdebug.value val debugTrace = util.trace when reifyDebug val reifyCopypaste = settings.Yreifycopypaste.value def printCopypaste(tree: Tree) { + if (reifyDebug) println("=======================") + printCopypaste1(tree) + if (reifyDebug) println("=======================") + } + def printCopypaste1(tree: Tree) { import scala.reflect.api.Modifier import scala.reflect.api.Modifier._ @@ -123,11 +129,14 @@ abstract class LiftCode extends Transform with TypingTransformers { case Apply(_, List(tree)) if sym == Code_lift => // reify Code.lift[T](expr) instances val saved = printTypings try { - printTypings = reifyDebug + debugTrace("transforming = ")(if (settings.Xshowtrees.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) debugTrace("transformed = ") { - val result = localTyper.typedPos(tree.pos)(codify(super.transform(tree))) - if (reifyCopypaste) printCopypaste(result) - result + val untyped = codify(super.transform(tree)) + if (reifyCopypaste) printCopypaste(untyped) + + printTypings = reifyTyperDebug + val typed = localTyper.typedPos(tree.pos)(untyped) + typed } } catch { case ex: ReifierError => @@ -145,7 +154,8 @@ abstract class LiftCode extends Transform with TypingTransformers { val targetType = definitions.CodeClass.primaryConstructor.info.paramTypes.head val reifier = new Reifier() val arg = gen.mkAsInstanceOf(reifier.reifyTopLevel(tree), targetType, wrapInApply = false) - val treetpe = + val treetpe = // this really should use packedType(tree.tpe, context.owner) + // where packedType is defined in Typers. But we can do that only if liftCode is moved to Typers. 
if (tree.tpe.typeSymbol.isAnonymousClass) tree.tpe.typeSymbol.classBound else tree.tpe New(TypeTree(appliedType(definitions.CodeClass.typeConstructor, List(treetpe.widen))), @@ -274,6 +284,14 @@ abstract class LiftCode extends Transform with TypingTransformers { case None => if (sym == NoSymbol) mirrorSelect("NoSymbol") + else if (sym == RootPackage) + mirrorSelect("definitions.RootPackage") + else if (sym == RootClass) + mirrorSelect("definitions.RootClass") + else if (sym == EmptyPackage) + mirrorSelect("definitions.EmptyPackage") + else if (sym == EmptyPackageClass) + mirrorSelect("definitions.EmptyPackageClass") else if (sym.isModuleClass) Select(reifySymRef(sym.sourceModule), "moduleClass") else if (sym.isStatic && sym.isClass) @@ -300,7 +318,7 @@ abstract class LiftCode extends Transform with TypingTransformers { if (sym.isTerm) { if (reifyDebug) println("Free: " + sym) val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false) - def markIfCaptured(arg: Ident): Tree = + def markIfCaptured(arg: Ident): Tree = if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg mirrorCall("freeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym))) } else { @@ -381,6 +399,14 @@ abstract class LiftCode extends Transform with TypingTransformers { } } + private def definedInLiftedCode(tpe: Type) = + tpe exists (tp => boundSyms contains tp.typeSymbol) + + private def isErased(tree: Tree) = tree match { + case tt: TypeTree => definedInLiftedCode(tt.tpe) && tt.original == null + case _ => false + } + /** Reify a tree */ private def reifyTree(tree: Tree): Tree = tree match { case EmptyTree => @@ -393,13 +419,21 @@ abstract class LiftCode extends Transform with TypingTransformers { mirrorCall("Select", reifyFree(tree), reifyName(nme.elem)) } else reifyFree(tree) case tt: TypeTree if (tt.tpe != null) => - if (!(boundSyms exists (tt.tpe contains _))) mirrorCall("TypeTree", reifyType(tt.tpe)) - else if (tt.original != null) reify(tt.original) - else mirrorCall(nme.TypeTree) + if (definedInLiftedCode(tt.tpe)) { + // erase non-essential (i.e. 
inferred) types + // reify symless counterparts of essential types + if (tt.original != null) reify(tt.original) else mirrorCall("TypeTree") + } else { + var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe)) + if (tt.original != null) { + val setOriginal = Select(rtt, newTermName("setOriginal")) + val reifiedOriginal = reify(tt.original) + rtt = Apply(setOriginal, List(reifiedOriginal)) + } + rtt + } case ta @ TypeApply(hk, ts) => - val thereAreOnlyTTs = ts collect { case t if !t.isInstanceOf[TypeTree] => t } isEmpty; - val ttsAreNotEssential = ts collect { case tt: TypeTree => tt } find { tt => tt.original != null } isEmpty; - if (thereAreOnlyTTs && ttsAreNotEssential) reifyTree(hk) else reifyProduct(ta) + if (ts exists isErased) reifyTree(hk) else reifyProduct(ta) case global.emptyValDef => mirrorSelect(nme.emptyValDef) case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) => @@ -407,8 +441,10 @@ abstract class LiftCode extends Transform with TypingTransformers { case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym => CannotReifyClassOfBoundEnum(tree, constant.tpe) case _ => - if (tree.isDef) + if (tree.isDef) { + if (reifyDebug) println("boundSym: " + tree.symbol) boundSyms += tree.symbol + } reifyProduct(tree) /* diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d3ff331f98..4cf134d58b 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2915,7 +2915,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { existentialAbstraction(captured.toList, tpe) } - /** convert skolems to existentials */ + /** convert local symbols and skolems to existentials */ def packedType(tree: Tree, owner: Symbol): Type = { def defines(tree: Tree, sym: Symbol) = sym.isExistentialSkolem && sym.unpackLocation == tree || diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala index 08071660a2..3526cf259d 100755 --- a/src/library/scala/reflect/api/StandardDefinitions.scala +++ b/src/library/scala/reflect/api/StandardDefinitions.scala @@ -12,7 +12,7 @@ trait StandardDefinitions { self: Universe => abstract class AbsDefinitions { // outer packages and their classes - def RootPackage: Symbol + def RootPackage: Symbol // under consideration def RootClass: Symbol def EmptyPackage: Symbol def EmptyPackageClass: Symbol @@ -46,6 +46,11 @@ trait StandardDefinitions { self: Universe => def StringClass : Symbol def ClassClass : Symbol + // product, tuple, function + def TupleClass : Array[Symbol] + def ProductClass : Array[Symbol] + def FunctionClass : Array[Symbol] + // fundamental modules def PredefModule: Symbol diff --git a/src/partest/scala/tools/partest/utils/CodeTest.scala b/src/partest/scala/tools/partest/utils/CodeTest.scala index c90168a313..c236d89bbd 100644 --- a/src/partest/scala/tools/partest/utils/CodeTest.scala +++ b/src/partest/scala/tools/partest/utils/CodeTest.scala @@ -24,11 +24,17 @@ object CodeTest { def apply[T](code: Code[T], args: Array[String] = Array()) = { println("testing: "+code.tree) + println("type is: "+code.manifest.tpe) + val isNullary = code.manifest.tpe.typeSymbol == scala.reflect.mirror.definitions.FunctionClass(0) val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter, args mkString " ") val ttree = toolbox.typeCheck(code.tree, code.manifest.tpe) - println("result = " + 
toolbox.showAttributed(ttree)) - val evaluated = toolbox.runExpr(ttree) + println("result = " + toolbox.showAttributed(ttree, printTypes = true, printIds = false)) + var evaluated = toolbox.runExpr(ttree) + if (evaluated != null && isNullary) { + val applyMeth = evaluated.getClass.getMethod("apply") + evaluated = applyMeth.invoke(evaluated) + } println("evaluated = "+evaluated) evaluated } diff --git a/test/files/run/code.check b/test/files/run/code.check index b946554fda..9b0351bbf9 100644 --- a/test/files/run/code.check +++ b/test/files/run/code.check @@ -1,29 +1,36 @@ testing: ((x: Int) => x.$plus(ys.length)) +type is: Int => Int result = ((x: Int) => x.+{(x: )Int}(ys.length{Int}){Int}){Int => Int} evaluated = testing: (() => { val e: Element = new Element("someName"); e }) +type is: () => Element result = (() => { val e: Element = new Element{Element}{(name: )Element}("someName"{String("someName")}){Element}; e{Element} }{Element}){() => Element} evaluated = Element(someName) testing: (() => truc.elem = 6) +type is: () => Unit result = (() => truc.elem{Int} = 6{Int(6)}{Unit}){() => Unit} evaluated = null testing: (() => truc.elem = truc.elem.$plus(6)) +type is: () => Unit result = (() => truc.elem{Int} = truc.elem.+{(x: )Int}(6{Int(6)}){Int}{Unit}){() => Unit} evaluated = null testing: (() => new baz.BazElement("someName")) +type is: () => baz.BazElement result = (() => new baz.BazElement{baz.BazElement}{(name: )baz.BazElement}("someName"{String("someName")}){baz.BazElement}){() => baz.BazElement} evaluated = BazElement(someName) testing: ((x: Int) => x.$plus(ys.length)) +type is: Int => Int result = ((x: Int) => x.+{(x: )Int}(ys.length{Int}){Int}){Int => Int} evaluated = static: 2 testing: (() => x.$plus(1)) +type is: () => Int result = (() => x.+{(x: )Int}(1{Int(1)}){Int}){() => Int} evaluated = 2 1+1 = 2 diff --git a/test/files/run/reify_complex.check b/test/files/run/reify_complex.check new file mode 100644 index 0000000000..7df35e33a0 --- /dev/null +++ b/test/files/run/reify_complex.check @@ -0,0 +1 @@ +3.0+4.0*i diff --git a/test/files/run/reify_complex.scala b/test/files/run/reify_complex.scala new file mode 100644 index 0000000000..aae4d558cf --- /dev/null +++ b/test/files/run/reify_complex.scala @@ -0,0 +1,31 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class Complex(val re: Double, val im: Double) { + def + (that: Complex) = + new Complex(re + that.re, im + that.im) + def - (that: Complex) = + new Complex(re - that.re, im - that.im) + def * (that: Complex) = + new Complex(re * that.re - im * that.im, + re * that.im + im * that.re) + def / (that: Complex) = { + val denom = that.re * that.re + that.im * that.im + new Complex((re * that.re + im * that.im) / denom, + (im * that.re - re * that.im) / denom) + } + override def toString = + re + (if (im < 0) "-" + (-im) else "+" + im) + "*i" + } + val x = new Complex(2, 1); val y = new Complex(1, 3) + println(x + y) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_extendbuiltins.check b/test/files/run/reify_extendbuiltins.check new file mode 100644 index 0000000000..a48033a30d --- /dev/null +++ b/test/files/run/reify_extendbuiltins.check @@ -0,0 +1 @@ +10! 
= 3628800 diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala new file mode 100644 index 0000000000..57acd699ff --- /dev/null +++ b/test/files/run/reify_extendbuiltins.scala @@ -0,0 +1,21 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def fact(n: Int): BigInt = + if (n == 0) 1 else fact(n-1) * n + class Factorizer(n: Int) { + def ! = fact(n) + } + implicit def int2fact(n: Int) = new Factorizer(n) + + println("10! = " + (10!)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_generic2.check b/test/files/run/reify_generic2.check new file mode 100644 index 0000000000..b8626c4cff --- /dev/null +++ b/test/files/run/reify_generic2.check @@ -0,0 +1 @@ +4 diff --git a/test/files/run/reify_generic2.scala b/test/files/run/reify_generic2.scala new file mode 100644 index 0000000000..d03fe7602b --- /dev/null +++ b/test/files/run/reify_generic2.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C + val product = List(new C, new C).length * List[C](new C, new C).length + println(product) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_getter.check b/test/files/run/reify_getter.check new file mode 100644 index 0000000000..5ef4ff4d04 --- /dev/null +++ b/test/files/run/reify_getter.check @@ -0,0 +1 @@ +evaluated = 2 diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala new file mode 100644 index 0000000000..83eaded506 --- /dev/null +++ b/test/files/run/reify_getter.scala @@ -0,0 +1,19 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { + val x = 2 + } + + new C().x + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + val evaluated = toolbox.runExpr(ttree) + println("evaluated = " + evaluated) +} diff --git a/test/files/run/reify_sort1.check b/test/files/run/reify_sort1.check new file mode 100644 index 0000000000..0d30805141 --- /dev/null +++ b/test/files/run/reify_sort1.check @@ -0,0 +1,2 @@ +List(6, 2, 8, 5, 1) +List(1, 2, 5, 6, 8) diff --git a/test/files/run/reify_sort1.scala b/test/files/run/reify_sort1.scala new file mode 100644 index 0000000000..42f4c824a5 --- /dev/null +++ b/test/files/run/reify_sort1.scala @@ -0,0 +1,27 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def sort(a: List[Int]): List[Int] = { + if (a.length < 2) + a + else { + val pivot = a(a.length / 2) + sort(a.filter(_ < pivot)) ::: + a.filter(_ == pivot) ::: + sort(a.filter(_ > pivot)) + } + } + + val xs = List(6, 2, 8, 5, 1) + println(xs) + println(sort(xs)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff 
--git a/test/files/run/t5269.check b/test/files/run/t5269.check new file mode 100644 index 0000000000..0cfbf08886 --- /dev/null +++ b/test/files/run/t5269.check @@ -0,0 +1 @@ +2 diff --git a/test/files/run/t5269.scala b/test/files/run/t5269.scala new file mode 100644 index 0000000000..a30509f3fe --- /dev/null +++ b/test/files/run/t5269.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + trait Z { + val z = 2 + } + + class X extends Z { + def println() = Predef.println(z) + } + + new X().println() + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5274_1.check b/test/files/run/t5274_1.check new file mode 100644 index 0000000000..fca8bc3d3e --- /dev/null +++ b/test/files/run/t5274_1.check @@ -0,0 +1,3 @@ +50! = 30414093201713378043612608166064768844377641568960512000000000000 +49! = 608281864034267560872252163321295376887552831379210240000000000 +50!/49! = 50 diff --git a/test/files/run/t5274_1.scala b/test/files/run/t5274_1.scala new file mode 100644 index 0000000000..c501172518 --- /dev/null +++ b/test/files/run/t5274_1.scala @@ -0,0 +1,20 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def factorial(n: BigInt): BigInt = + if (n == 0) 1 else n * factorial(n-1) + + val f50 = factorial(50); val f49 = factorial(49) + println("50! = " + f50) + println("49! = " + f49) + println("50!/49! = " + (f50 / f49)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5275.check b/test/files/run/t5275.check new file mode 100644 index 0000000000..0cfbf08886 --- /dev/null +++ b/test/files/run/t5275.check @@ -0,0 +1 @@ +2 diff --git a/test/files/run/t5275.scala b/test/files/run/t5275.scala new file mode 100644 index 0000000000..d419834ded --- /dev/null +++ b/test/files/run/t5275.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C(val foo: Int) + println(new C(2).foo) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5277_1.check b/test/files/run/t5277_1.check new file mode 100644 index 0000000000..a48033a30d --- /dev/null +++ b/test/files/run/t5277_1.check @@ -0,0 +1 @@ +10! = 3628800 diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala new file mode 100644 index 0000000000..57acd699ff --- /dev/null +++ b/test/files/run/t5277_1.scala @@ -0,0 +1,21 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def fact(n: Int): BigInt = + if (n == 0) 1 else fact(n-1) * n + class Factorizer(n: Int) { + def ! = fact(n) + } + implicit def int2fact(n: Int) = new Factorizer(n) + + println("10! 
= " + (10!)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5277_2.check b/test/files/run/t5277_2.check new file mode 100644 index 0000000000..ca017e2a40 --- /dev/null +++ b/test/files/run/t5277_2.check @@ -0,0 +1,2 @@ +2() +1() diff --git a/test/files/run/t5277_2.scala b/test/files/run/t5277_2.scala new file mode 100644 index 0000000000..67b6b000bc --- /dev/null +++ b/test/files/run/t5277_2.scala @@ -0,0 +1,18 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + def p(implicit i: Int) = print(i) + implicit val v = 2 + + println(p) + println(p(1)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5335.check b/test/files/run/t5335.check new file mode 100644 index 0000000000..0cfbf08886 --- /dev/null +++ b/test/files/run/t5335.check @@ -0,0 +1 @@ +2 diff --git a/test/files/run/t5335.scala b/test/files/run/t5335.scala new file mode 100644 index 0000000000..9a8b91f04d --- /dev/null +++ b/test/files/run/t5335.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + println(new {def x = 2}.x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/pending/run/reify_closure6.check b/test/pending/run/reify_closure6.check index 3526d04b0e..e521ea874d 100644 --- a/test/pending/run/reify_closure6.check +++ b/test/pending/run/reify_closure6.check @@ -1,3 +1,7 @@ +q = 1 +y = 1 first invocation = 15 -second invocation = 18 +q = 2 +y = 1 +second invocation = 17 q after second invocation = 2 diff --git a/test/pending/run/reify_closure6.scala b/test/pending/run/reify_closure6.scala index 909071aa44..43ddfde28d 100644 --- a/test/pending/run/reify_closure6.scala +++ b/test/pending/run/reify_closure6.scala @@ -10,6 +10,8 @@ object Test extends App { val fun: reflect.Code[Int => Int] = x => { y += 1 q += 1 + println("q = " + q) + println("y = " + y) x + ys.length * z + q + y } diff --git a/test/pending/run/reify_closure7.check b/test/pending/run/reify_closure7.check new file mode 100644 index 0000000000..bf58b52bce --- /dev/null +++ b/test/pending/run/reify_closure7.check @@ -0,0 +1,6 @@ +q = 1 +y = 1 +first invocation = 15 +q = 2 +y = 2 +second invocation = 17 diff --git a/test/pending/run/reify_closure7.scala b/test/pending/run/reify_closure7.scala new file mode 100644 index 0000000000..8933df23fa --- /dev/null +++ b/test/pending/run/reify_closure7.scala @@ -0,0 +1,32 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + var q = 0 + var clo: Int => Int = null + def foo[T](ys: List[T]): Int => Int = { + val z = 1 + var y = 0 + val fun: reflect.Code[Int => Int] = x => { + y += 1 + q += 1 + println("q = " + q) + println("y = " + y) + x + ys.length * z + q + y + } + + if (clo == null) { + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = 
toolbox.runExpr(ttree) + clo = dyn.asInstanceOf[Int => Int] + } + + clo + } + + println("first invocation = " + foo(List(1, 2, 3))(10)) + println("second invocation = " + foo(List(1, 2, 3, 4))(10)) +} diff --git a/test/pending/run/reify_closure8a.check b/test/pending/run/reify_closure8a.check new file mode 100644 index 0000000000..9a037142aa --- /dev/null +++ b/test/pending/run/reify_closure8a.check @@ -0,0 +1 @@ +10 \ No newline at end of file diff --git a/test/pending/run/reify_closure8a.scala b/test/pending/run/reify_closure8a.scala new file mode 100644 index 0000000000..5e54bfc8c7 --- /dev/null +++ b/test/pending/run/reify_closure8a.scala @@ -0,0 +1,17 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + class Foo(val y: Int) { + def fun = lift{y} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(new Foo(10).fun.tree) + val dyn = toolbox.runExpr(ttree) + val foo = dyn.asInstanceOf[Int] + println(foo) +} diff --git a/test/pending/run/reify_closure8b.check b/test/pending/run/reify_closure8b.check new file mode 100644 index 0000000000..9a037142aa --- /dev/null +++ b/test/pending/run/reify_closure8b.check @@ -0,0 +1 @@ +10 \ No newline at end of file diff --git a/test/pending/run/reify_closure8b.scala b/test/pending/run/reify_closure8b.scala new file mode 100644 index 0000000000..9e37e4e09a --- /dev/null +++ b/test/pending/run/reify_closure8b.scala @@ -0,0 +1,17 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + class Foo(y: Int) { + def fun = lift{y} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(new Foo(10).fun.tree) + val dyn = toolbox.runExpr(ttree) + val foo = dyn.asInstanceOf[Int] + println(foo) +} diff --git a/test/pending/run/reify_closure9a.check b/test/pending/run/reify_closure9a.check new file mode 100644 index 0000000000..9a037142aa --- /dev/null +++ b/test/pending/run/reify_closure9a.check @@ -0,0 +1 @@ +10 \ No newline at end of file diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala new file mode 100644 index 0000000000..f3ee153d3c --- /dev/null +++ b/test/pending/run/reify_closure9a.scala @@ -0,0 +1,20 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo(y: Int) = { + class Foo(val y: Int) { + def fun = lift{y} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(new Foo(y).fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int] + } + + println(foo(10)) +} diff --git a/test/pending/run/reify_closure9b.check b/test/pending/run/reify_closure9b.check new file mode 100644 index 0000000000..9a037142aa --- /dev/null +++ b/test/pending/run/reify_closure9b.check @@ -0,0 +1 @@ +10 \ No newline at end of file diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala new file mode 100644 index 0000000000..8d349e8701 --- /dev/null +++ b/test/pending/run/reify_closure9b.scala @@ -0,0 +1,20 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import 
reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo(y: Int) = { + class Foo(y: Int) { + def fun = lift{y} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(new Foo(y).fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int] + } + + println(foo(10)) +} diff --git a/test/pending/run/reify_closures10.check b/test/pending/run/reify_closures10.check new file mode 100644 index 0000000000..fd3c81a4d7 --- /dev/null +++ b/test/pending/run/reify_closures10.check @@ -0,0 +1,2 @@ +5 +5 diff --git a/test/pending/run/reify_closures10.scala b/test/pending/run/reify_closures10.scala new file mode 100644 index 0000000000..d0f895ae4d --- /dev/null +++ b/test/pending/run/reify_closures10.scala @@ -0,0 +1,15 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val x = 2 + val y = 3 + val code = lift{println(x + y); x + y} + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + println(toolbox.runExpr(ttree)) +} diff --git a/test/pending/run/reify_closures11.check b/test/pending/run/reify_closures11.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/pending/run/reify_closures11.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala new file mode 100644 index 0000000000..42053bd029 --- /dev/null +++ b/test/pending/run/reify_closures11.scala @@ -0,0 +1,18 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def fun() = { + def z() = 2 + lift{z} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun().tree) + val dyn = toolbox.runExpr(ttree) + val foo = dyn.asInstanceOf[Int] + println(foo) +} diff --git a/test/pending/run/reify_complex.check b/test/pending/run/reify_complex.check deleted file mode 100644 index 7df35e33a0..0000000000 --- a/test/pending/run/reify_complex.check +++ /dev/null @@ -1 +0,0 @@ -3.0+4.0*i diff --git a/test/pending/run/reify_complex.scala b/test/pending/run/reify_complex.scala deleted file mode 100644 index aae4d558cf..0000000000 --- a/test/pending/run/reify_complex.scala +++ /dev/null @@ -1,31 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class Complex(val re: Double, val im: Double) { - def + (that: Complex) = - new Complex(re + that.re, im + that.im) - def - (that: Complex) = - new Complex(re - that.re, im - that.im) - def * (that: Complex) = - new Complex(re * that.re - im * that.im, - re * that.im + im * that.re) - def / (that: Complex) = { - val denom = that.re * that.re + that.im * that.im - new Complex((re * that.re + im * that.im) / denom, - (im * that.re - re * that.im) / denom) - } - override def toString = - re + (if (im < 0) "-" + (-im) else "+" + im) + "*i" - } - val x = new Complex(2, 1); val y = new Complex(1, 3) - println(x + y) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git 
a/test/pending/run/reify_extendbuiltins.check b/test/pending/run/reify_extendbuiltins.check deleted file mode 100644 index a48033a30d..0000000000 --- a/test/pending/run/reify_extendbuiltins.check +++ /dev/null @@ -1 +0,0 @@ -10! = 3628800 diff --git a/test/pending/run/reify_extendbuiltins.scala b/test/pending/run/reify_extendbuiltins.scala deleted file mode 100644 index 57acd699ff..0000000000 --- a/test/pending/run/reify_extendbuiltins.scala +++ /dev/null @@ -1,21 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def fact(n: Int): BigInt = - if (n == 0) 1 else fact(n-1) * n - class Factorizer(n: Int) { - def ! = fact(n) - } - implicit def int2fact(n: Int) = new Factorizer(n) - - println("10! = " + (10!)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_sort1.check b/test/pending/run/reify_sort1.check deleted file mode 100644 index 0d30805141..0000000000 --- a/test/pending/run/reify_sort1.check +++ /dev/null @@ -1,2 +0,0 @@ -List(6, 2, 8, 5, 1) -List(1, 2, 5, 6, 8) diff --git a/test/pending/run/reify_sort1.scala b/test/pending/run/reify_sort1.scala deleted file mode 100644 index 42f4c824a5..0000000000 --- a/test/pending/run/reify_sort1.scala +++ /dev/null @@ -1,27 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def sort(a: List[Int]): List[Int] = { - if (a.length < 2) - a - else { - val pivot = a(a.length / 2) - sort(a.filter(_ < pivot)) ::: - a.filter(_ == pivot) ::: - sort(a.filter(_ > pivot)) - } - } - - val xs = List(6, 2, 8, 5, 1) - println(xs) - println(sort(xs)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_this.check b/test/pending/run/reify_this.check new file mode 100644 index 0000000000..af3d0652a9 --- /dev/null +++ b/test/pending/run/reify_this.check @@ -0,0 +1,5 @@ +foo +false +2 +bar +2 \ No newline at end of file diff --git a/test/pending/run/reify_this.scala b/test/pending/run/reify_this.scala new file mode 100644 index 0000000000..38ef72b6eb --- /dev/null +++ b/test/pending/run/reify_this.scala @@ -0,0 +1,31 @@ +import scala.reflect._ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +trait Eval { + def eval(code: Code[_]): Any = eval(code.tree) + + def eval(tree: Tree): Any = { + val settings = new Settings + val reporter = new ConsoleReporter(settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(tree) + toolbox.runExpr(ttree) + } +} + +object Test extends App with Eval { + // select a value from package + eval(lift{println("foo")}) + eval(lift{println((new Object).toString == (new Object).toString)}) + + // select a type from package + eval(lift{val x: Any = 2; println(x)}) + eval(lift{val x: Object = "bar"; println(x)}) + + // select a value from module + val x = 2 + eval(lift{println(x)}) +} diff --git a/test/pending/run/t5269.check b/test/pending/run/t5269.check deleted file mode 100644 index 0cfbf08886..0000000000 --- a/test/pending/run/t5269.check +++ /dev/null @@ -1 +0,0 @@ -2 diff 
--git a/test/pending/run/t5269.scala b/test/pending/run/t5269.scala deleted file mode 100644 index a30509f3fe..0000000000 --- a/test/pending/run/t5269.scala +++ /dev/null @@ -1,22 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - trait Z { - val z = 2 - } - - class X extends Z { - def println() = Predef.println(z) - } - - new X().println() - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5274_1.check b/test/pending/run/t5274_1.check deleted file mode 100644 index fca8bc3d3e..0000000000 --- a/test/pending/run/t5274_1.check +++ /dev/null @@ -1,3 +0,0 @@ -50! = 30414093201713378043612608166064768844377641568960512000000000000 -49! = 608281864034267560872252163321295376887552831379210240000000000 -50!/49! = 50 diff --git a/test/pending/run/t5274_1.scala b/test/pending/run/t5274_1.scala deleted file mode 100644 index c501172518..0000000000 --- a/test/pending/run/t5274_1.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def factorial(n: BigInt): BigInt = - if (n == 0) 1 else n * factorial(n-1) - - val f50 = factorial(50); val f49 = factorial(49) - println("50! = " + f50) - println("49! = " + f49) - println("50!/49! = " + (f50 / f49)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5275.check b/test/pending/run/t5275.check deleted file mode 100644 index 0cfbf08886..0000000000 --- a/test/pending/run/t5275.check +++ /dev/null @@ -1 +0,0 @@ -2 diff --git a/test/pending/run/t5275.scala b/test/pending/run/t5275.scala deleted file mode 100644 index d419834ded..0000000000 --- a/test/pending/run/t5275.scala +++ /dev/null @@ -1,15 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class C(val foo: Int) - println(new C(2).foo) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5277_1.check b/test/pending/run/t5277_1.check deleted file mode 100644 index a48033a30d..0000000000 --- a/test/pending/run/t5277_1.check +++ /dev/null @@ -1 +0,0 @@ -10! = 3628800 diff --git a/test/pending/run/t5277_1.scala b/test/pending/run/t5277_1.scala deleted file mode 100644 index 57acd699ff..0000000000 --- a/test/pending/run/t5277_1.scala +++ /dev/null @@ -1,21 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def fact(n: Int): BigInt = - if (n == 0) 1 else fact(n-1) * n - class Factorizer(n: Int) { - def ! = fact(n) - } - implicit def int2fact(n: Int) = new Factorizer(n) - - println("10! 
= " + (10!)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5277_2.check b/test/pending/run/t5277_2.check deleted file mode 100644 index 5f1d0ecea5..0000000000 --- a/test/pending/run/t5277_2.check +++ /dev/null @@ -1,2 +0,0 @@ -2 -1 diff --git a/test/pending/run/t5277_2.scala b/test/pending/run/t5277_2.scala deleted file mode 100644 index 67b6b000bc..0000000000 --- a/test/pending/run/t5277_2.scala +++ /dev/null @@ -1,18 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - def p(implicit i: Int) = print(i) - implicit val v = 2 - - println(p) - println(p(1)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5415.check b/test/pending/run/t5415.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/t5415.scala b/test/pending/run/t5415.scala new file mode 100644 index 0000000000..3db356da86 --- /dev/null +++ b/test/pending/run/t5415.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import scala.reflect.runtime.Mirror.ToolBox + +object Test extends App{ + case class Queryable2[T]() { def filter(predicate: T => Boolean) = ??? } + trait CoffeesTable{ def sales : Int } + val q = Queryable2[CoffeesTable]() + val code = scala.reflect.Code.lift{q.filter(_.sales > 5)} + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) +} -- cgit v1.2.3 From 363f8af6a8c157485a644d00d75e2df10e71e661 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Thu, 2 Feb 2012 15:29:55 +0100 Subject: Fixes reifyThis --- src/compiler/scala/reflect/internal/StdNames.scala | 1 + src/compiler/scala/reflect/internal/Trees.scala | 2 - .../scala/tools/nsc/transform/LiftCode.scala | 15 +++++- src/library/scala/reflect/api/Trees.scala | 3 ++ test/files/run/reify_closure1.check | 2 + test/files/run/reify_closure1.scala | 20 ++++++++ test/files/run/reify_closure2a.check | 2 + test/files/run/reify_closure2a.scala | 20 ++++++++ test/files/run/reify_closure3a.check | 2 + test/files/run/reify_closure3a.scala | 22 +++++++++ test/files/run/reify_closure4a.check | 2 + test/files/run/reify_closure4a.scala | 22 +++++++++ test/files/run/reify_closure5a.check | 2 + test/files/run/reify_closure5a.scala | 20 ++++++++ test/files/run/reify_closure6.check | 7 +++ test/files/run/reify_closure6.scala | 28 +++++++++++ test/files/run/reify_closure7.check | 6 +++ test/files/run/reify_closure7.scala | 32 ++++++++++++ test/files/run/reify_closure8a.check | 1 + test/files/run/reify_closure8a.scala | 17 +++++++ test/files/run/reify_closures10.check | 2 + test/files/run/reify_closures10.scala | 15 ++++++ test/files/run/reify_implicits.check | 1 + test/files/run/reify_implicits.scala | 21 ++++++++ test/files/run/reify_sort.check | 2 + test/files/run/reify_sort.scala | 57 ++++++++++++++++++++++ test/files/run/reify_this.check | 5 ++ test/files/run/reify_this.scala | 31 ++++++++++++ test/files/run/t5274_2.check | 2 + test/files/run/t5274_2.scala | 57 ++++++++++++++++++++++ test/files/run/t5279.check | 1 + test/files/run/t5279.scala | 14 ++++++ test/files/run/t5415.check | 0 
test/files/run/t5415.scala | 14 ++++++ test/pending/run/reify_closure1.check | 2 - test/pending/run/reify_closure1.scala | 20 -------- test/pending/run/reify_closure2a.check | 2 - test/pending/run/reify_closure2a.scala | 20 -------- test/pending/run/reify_closure3a.check | 2 - test/pending/run/reify_closure3a.scala | 22 --------- test/pending/run/reify_closure4a.check | 2 - test/pending/run/reify_closure4a.scala | 22 --------- test/pending/run/reify_closure5a.check | 2 - test/pending/run/reify_closure5a.scala | 20 -------- test/pending/run/reify_closure6.check | 7 --- test/pending/run/reify_closure6.scala | 28 ----------- test/pending/run/reify_closure7.check | 6 --- test/pending/run/reify_closure7.scala | 32 ------------ test/pending/run/reify_closure8a.check | 1 - test/pending/run/reify_closure8a.scala | 17 ------- test/pending/run/reify_closures10.check | 2 - test/pending/run/reify_closures10.scala | 15 ------ test/pending/run/reify_implicits.check | 1 - test/pending/run/reify_implicits.scala | 21 -------- test/pending/run/reify_sort.check | 2 - test/pending/run/reify_sort.scala | 57 ---------------------- test/pending/run/reify_this.check | 5 -- test/pending/run/reify_this.scala | 31 ------------ test/pending/run/t5274_2.check | 2 - test/pending/run/t5274_2.scala | 57 ---------------------- test/pending/run/t5279.check | 1 - test/pending/run/t5279.scala | 14 ------ test/pending/run/t5415.check | 0 test/pending/run/t5415.scala | 14 ------ 64 files changed, 444 insertions(+), 431 deletions(-) create mode 100644 test/files/run/reify_closure1.check create mode 100644 test/files/run/reify_closure1.scala create mode 100644 test/files/run/reify_closure2a.check create mode 100644 test/files/run/reify_closure2a.scala create mode 100644 test/files/run/reify_closure3a.check create mode 100644 test/files/run/reify_closure3a.scala create mode 100644 test/files/run/reify_closure4a.check create mode 100644 test/files/run/reify_closure4a.scala create mode 100644 test/files/run/reify_closure5a.check create mode 100644 test/files/run/reify_closure5a.scala create mode 100644 test/files/run/reify_closure6.check create mode 100644 test/files/run/reify_closure6.scala create mode 100644 test/files/run/reify_closure7.check create mode 100644 test/files/run/reify_closure7.scala create mode 100644 test/files/run/reify_closure8a.check create mode 100644 test/files/run/reify_closure8a.scala create mode 100644 test/files/run/reify_closures10.check create mode 100644 test/files/run/reify_closures10.scala create mode 100644 test/files/run/reify_implicits.check create mode 100644 test/files/run/reify_implicits.scala create mode 100644 test/files/run/reify_sort.check create mode 100644 test/files/run/reify_sort.scala create mode 100644 test/files/run/reify_this.check create mode 100644 test/files/run/reify_this.scala create mode 100644 test/files/run/t5274_2.check create mode 100644 test/files/run/t5274_2.scala create mode 100644 test/files/run/t5279.check create mode 100644 test/files/run/t5279.scala create mode 100644 test/files/run/t5415.check create mode 100644 test/files/run/t5415.scala delete mode 100644 test/pending/run/reify_closure1.check delete mode 100644 test/pending/run/reify_closure1.scala delete mode 100644 test/pending/run/reify_closure2a.check delete mode 100644 test/pending/run/reify_closure2a.scala delete mode 100644 test/pending/run/reify_closure3a.check delete mode 100644 test/pending/run/reify_closure3a.scala delete mode 100644 test/pending/run/reify_closure4a.check delete mode 100644 
test/pending/run/reify_closure4a.scala delete mode 100644 test/pending/run/reify_closure5a.check delete mode 100644 test/pending/run/reify_closure5a.scala delete mode 100644 test/pending/run/reify_closure6.check delete mode 100644 test/pending/run/reify_closure6.scala delete mode 100644 test/pending/run/reify_closure7.check delete mode 100644 test/pending/run/reify_closure7.scala delete mode 100644 test/pending/run/reify_closure8a.check delete mode 100644 test/pending/run/reify_closure8a.scala delete mode 100644 test/pending/run/reify_closures10.check delete mode 100644 test/pending/run/reify_closures10.scala delete mode 100644 test/pending/run/reify_implicits.check delete mode 100644 test/pending/run/reify_implicits.scala delete mode 100644 test/pending/run/reify_sort.check delete mode 100644 test/pending/run/reify_sort.scala delete mode 100644 test/pending/run/reify_this.check delete mode 100644 test/pending/run/reify_this.scala delete mode 100644 test/pending/run/t5274_2.check delete mode 100644 test/pending/run/t5274_2.scala delete mode 100644 test/pending/run/t5279.check delete mode 100644 test/pending/run/t5279.scala delete mode 100644 test/pending/run/t5415.check delete mode 100644 test/pending/run/t5415.scala diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala index b3069adfb4..b1a24c0be2 100644 --- a/src/compiler/scala/reflect/internal/StdNames.scala +++ b/src/compiler/scala/reflect/internal/StdNames.scala @@ -271,6 +271,7 @@ trait StdNames extends NameManglers { self: SymbolTable => // Compiler utilized names // val productElementName: NameType = "productElementName" val Ident: NameType = "Ident" + val This: NameType = "This" val StringContext: NameType = "StringContext" val TYPE_ : NameType = "TYPE" val TypeTree: NameType = "TypeTree" diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index 5bb0c98bfb..ca7801ac9d 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -251,8 +251,6 @@ trait Trees extends api.Trees { self: SymbolTable => def Super(sym: Symbol, mix: TypeName): Tree = Super(This(sym), mix) - def This(sym: Symbol): Tree = This(sym.name.toTypeName) setSymbol sym - /** Block factory that flattens directly nested blocks. */ def Block(stats: Tree*): Block = { diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala index 197a52f011..d0ed92f8ba 100644 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ b/src/compiler/scala/tools/nsc/transform/LiftCode.scala @@ -460,8 +460,19 @@ abstract class LiftCode extends Transform with TypingTransformers { * Reify a free reference. The result will be either a mirror reference * to a global value, or else a mirror Literal. 
*/ - private def reifyFree(tree: Tree): Tree = - mirrorCall(nme.Ident, reifySymRef(tree.symbol)) + private def reifyFree(tree: Tree): Tree = tree match { + case This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass => + val sym = tree.symbol + if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) + if (reifyDebug) println("Free: " + sym) + val freeVar = mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), This(sym)) + mirrorCall(nme.Ident, freeVar) + case This(_) => + if (reifyDebug) println("This for %s, reified as This".format(tree.symbol)) + mirrorCall(nme.This, reifySymRef(tree.symbol)) + case _ => + mirrorCall(nme.Ident, reifySymRef(tree.symbol)) + } // todo: consider whether we should also reify positions private def reifyPosition(pos: Position): Tree = diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index 03b043c188..0a38fb45bf 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -537,6 +537,9 @@ trait Trees { self: Universe => // The symbol of a This is the class to which the this refers. // For instance in C.this, it would be C. + def This(sym: Symbol): Tree = + This(sym.name.toTypeName) setSymbol sym + /** Designator . */ case class Select(qualifier: Tree, name: Name) extends RefTree diff --git a/test/files/run/reify_closure1.check b/test/files/run/reify_closure1.check new file mode 100644 index 0000000000..b2f7f08c17 --- /dev/null +++ b/test/files/run/reify_closure1.check @@ -0,0 +1,2 @@ +10 +10 diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala new file mode 100644 index 0000000000..825a38dc1d --- /dev/null +++ b/test/files/run/reify_closure1.scala @@ -0,0 +1,20 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo[T](ys: List[T]): Int => Int = { + val fun: reflect.Code[Int => Int] = x => { + x + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int => Int] + } + + println(foo(List(1, 2, 3))(10)) + println(foo(List(1, 2, 3, 4))(10)) +} diff --git a/test/files/run/reify_closure2a.check b/test/files/run/reify_closure2a.check new file mode 100644 index 0000000000..c1f3abd7e6 --- /dev/null +++ b/test/files/run/reify_closure2a.check @@ -0,0 +1,2 @@ +11 +12 diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala new file mode 100644 index 0000000000..b88bec005d --- /dev/null +++ b/test/files/run/reify_closure2a.scala @@ -0,0 +1,20 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo(y: Int): Int => Int = { + val fun: reflect.Code[Int => Int] = x => { + x + y + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int => Int] + } + + println(foo(1)(10)) + println(foo(2)(10)) +} diff --git a/test/files/run/reify_closure3a.check b/test/files/run/reify_closure3a.check new file mode 100644 index 0000000000..c1f3abd7e6 --- /dev/null +++ b/test/files/run/reify_closure3a.check @@ -0,0 +1,2 @@ +11 +12 diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala new file mode 100644 index 
0000000000..6414fa58a3 --- /dev/null +++ b/test/files/run/reify_closure3a.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo(y: Int): Int => Int = { + def y1 = y + + val fun: reflect.Code[Int => Int] = x => { + x + y1 + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int => Int] + } + + println(foo(1)(10)) + println(foo(2)(10)) +} diff --git a/test/files/run/reify_closure4a.check b/test/files/run/reify_closure4a.check new file mode 100644 index 0000000000..c1f3abd7e6 --- /dev/null +++ b/test/files/run/reify_closure4a.check @@ -0,0 +1,2 @@ +11 +12 diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala new file mode 100644 index 0000000000..99e9d82706 --- /dev/null +++ b/test/files/run/reify_closure4a.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo(y: Int): Int => Int = { + val y1 = y + + val fun: reflect.Code[Int => Int] = x => { + x + y1 + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int => Int] + } + + println(foo(1)(10)) + println(foo(2)(10)) +} diff --git a/test/files/run/reify_closure5a.check b/test/files/run/reify_closure5a.check new file mode 100644 index 0000000000..df9e19c591 --- /dev/null +++ b/test/files/run/reify_closure5a.check @@ -0,0 +1,2 @@ +13 +14 diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala new file mode 100644 index 0000000000..0ac53d5479 --- /dev/null +++ b/test/files/run/reify_closure5a.scala @@ -0,0 +1,20 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + def foo[T](ys: List[T]): Int => Int = { + val fun: reflect.Code[Int => Int] = x => { + x + ys.length + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + dyn.asInstanceOf[Int => Int] + } + + println(foo(List(1, 2, 3))(10)) + println(foo(List(1, 2, 3, 4))(10)) +} diff --git a/test/files/run/reify_closure6.check b/test/files/run/reify_closure6.check new file mode 100644 index 0000000000..b9de4c6baf --- /dev/null +++ b/test/files/run/reify_closure6.check @@ -0,0 +1,7 @@ +q = 1 +y = 1 +first invocation = 15 +q = 2 +y = 1 +second invocation = 17 +q after second invocation = 2 \ No newline at end of file diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala new file mode 100644 index 0000000000..54f1791bf2 --- /dev/null +++ b/test/files/run/reify_closure6.scala @@ -0,0 +1,28 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + var q = 0 + def foo[T](ys: List[T]): Int => Int = { + val z = 1 + var y = 0 + val fun: reflect.Code[Int => Int] = x => { + y += 1 + q += 1 + println("q = " + q) + println("y = " + y) + x + ys.length * z + q + y + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + 
dyn.asInstanceOf[Int => Int] + } + + println("first invocation = " + foo(List(1, 2, 3))(10)) + println("second invocation = " + foo(List(1, 2, 3, 4))(10)) + println("q after second invocation = " + q) +} \ No newline at end of file diff --git a/test/files/run/reify_closure7.check b/test/files/run/reify_closure7.check new file mode 100644 index 0000000000..bf58b52bce --- /dev/null +++ b/test/files/run/reify_closure7.check @@ -0,0 +1,6 @@ +q = 1 +y = 1 +first invocation = 15 +q = 2 +y = 2 +second invocation = 17 diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala new file mode 100644 index 0000000000..8933df23fa --- /dev/null +++ b/test/files/run/reify_closure7.scala @@ -0,0 +1,32 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + var q = 0 + var clo: Int => Int = null + def foo[T](ys: List[T]): Int => Int = { + val z = 1 + var y = 0 + val fun: reflect.Code[Int => Int] = x => { + y += 1 + q += 1 + println("q = " + q) + println("y = " + y) + x + ys.length * z + q + y + } + + if (clo == null) { + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(fun.tree) + val dyn = toolbox.runExpr(ttree) + clo = dyn.asInstanceOf[Int => Int] + } + + clo + } + + println("first invocation = " + foo(List(1, 2, 3))(10)) + println("second invocation = " + foo(List(1, 2, 3, 4))(10)) +} diff --git a/test/files/run/reify_closure8a.check b/test/files/run/reify_closure8a.check new file mode 100644 index 0000000000..9a037142aa --- /dev/null +++ b/test/files/run/reify_closure8a.check @@ -0,0 +1 @@ +10 \ No newline at end of file diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala new file mode 100644 index 0000000000..5e54bfc8c7 --- /dev/null +++ b/test/files/run/reify_closure8a.scala @@ -0,0 +1,17 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + class Foo(val y: Int) { + def fun = lift{y} + } + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(new Foo(10).fun.tree) + val dyn = toolbox.runExpr(ttree) + val foo = dyn.asInstanceOf[Int] + println(foo) +} diff --git a/test/files/run/reify_closures10.check b/test/files/run/reify_closures10.check new file mode 100644 index 0000000000..fd3c81a4d7 --- /dev/null +++ b/test/files/run/reify_closures10.check @@ -0,0 +1,2 @@ +5 +5 diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala new file mode 100644 index 0000000000..d0f895ae4d --- /dev/null +++ b/test/files/run/reify_closures10.scala @@ -0,0 +1,15 @@ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val x = 2 + val y = 3 + val code = lift{println(x + y); x + y} + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + println(toolbox.runExpr(ttree)) +} diff --git a/test/files/run/reify_implicits.check b/test/files/run/reify_implicits.check new file mode 100644 index 0000000000..e3aeb20f6b --- /dev/null +++ b/test/files/run/reify_implicits.check @@ -0,0 +1 @@ +x = List(1, 2, 3, 4) diff --git a/test/files/run/reify_implicits.scala b/test/files/run/reify_implicits.scala new file mode 
100644 index 0000000000..a15cef9c97 --- /dev/null +++ b/test/files/run/reify_implicits.scala @@ -0,0 +1,21 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + implicit def arrayWrapper[A : ClassManifest](x: Array[A]) = + new { + def sort(p: (A, A) => Boolean) = { + util.Sorting.stableSort(x, p); x + } + } + val x = Array(2, 3, 1, 4) + println("x = "+ x.sort((x: Int, y: Int) => x < y).toList) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_sort.check b/test/files/run/reify_sort.check new file mode 100644 index 0000000000..375536cc29 --- /dev/null +++ b/test/files/run/reify_sort.check @@ -0,0 +1,2 @@ +[6,2,8,5,1] +[1,2,5,6,8] diff --git a/test/files/run/reify_sort.scala b/test/files/run/reify_sort.scala new file mode 100644 index 0000000000..42991fe5d2 --- /dev/null +++ b/test/files/run/reify_sort.scala @@ -0,0 +1,57 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + /** Nested methods can use and even update everything + * visible in their scope (including local variables or + * arguments of enclosing methods). + */ + def sort(a: Array[Int]) { + + def swap(i: Int, j: Int) { + val t = a(i); a(i) = a(j); a(j) = t + } + + def sort1(l: Int, r: Int) { + val pivot = a((l + r) / 2) + var i = l + var j = r + while (i <= j) { + while (a(i) < pivot) i += 1 + while (a(j) > pivot) j -= 1 + if (i <= j) { + swap(i, j) + i += 1 + j -= 1 + } + } + if (l < j) sort1(l, j) + if (j < r) sort1(i, r) + } + + if (a.length > 0) + sort1(0, a.length - 1) + } + + def println(ar: Array[Int]) { + def print1 = { + def iter(i: Int): String = + ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "") + if (ar.length == 0) "" else iter(0) + } + Console.println("[" + print1 + "]") + } + + val ar = Array(6, 2, 8, 5, 1) + println(ar) + sort(ar) + println(ar) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_this.check b/test/files/run/reify_this.check new file mode 100644 index 0000000000..af3d0652a9 --- /dev/null +++ b/test/files/run/reify_this.check @@ -0,0 +1,5 @@ +foo +false +2 +bar +2 \ No newline at end of file diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala new file mode 100644 index 0000000000..38ef72b6eb --- /dev/null +++ b/test/files/run/reify_this.scala @@ -0,0 +1,31 @@ +import scala.reflect._ +import scala.reflect.Code._ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +trait Eval { + def eval(code: Code[_]): Any = eval(code.tree) + + def eval(tree: Tree): Any = { + val settings = new Settings + val reporter = new ConsoleReporter(settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(tree) + toolbox.runExpr(ttree) + } +} + +object Test extends App with Eval { + // select a value from package + eval(lift{println("foo")}) + eval(lift{println((new Object).toString == (new Object).toString)}) + + // select a type from package + eval(lift{val x: Any = 2; println(x)}) + eval(lift{val x: Object = "bar"; println(x)}) + + // select a value from module + val x 
= 2 + eval(lift{println(x)}) +} diff --git a/test/files/run/t5274_2.check b/test/files/run/t5274_2.check new file mode 100644 index 0000000000..375536cc29 --- /dev/null +++ b/test/files/run/t5274_2.check @@ -0,0 +1,2 @@ +[6,2,8,5,1] +[1,2,5,6,8] diff --git a/test/files/run/t5274_2.scala b/test/files/run/t5274_2.scala new file mode 100644 index 0000000000..42991fe5d2 --- /dev/null +++ b/test/files/run/t5274_2.scala @@ -0,0 +1,57 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + /** Nested methods can use and even update everything + * visible in their scope (including local variables or + * arguments of enclosing methods). + */ + def sort(a: Array[Int]) { + + def swap(i: Int, j: Int) { + val t = a(i); a(i) = a(j); a(j) = t + } + + def sort1(l: Int, r: Int) { + val pivot = a((l + r) / 2) + var i = l + var j = r + while (i <= j) { + while (a(i) < pivot) i += 1 + while (a(j) > pivot) j -= 1 + if (i <= j) { + swap(i, j) + i += 1 + j -= 1 + } + } + if (l < j) sort1(l, j) + if (j < r) sort1(i, r) + } + + if (a.length > 0) + sort1(0, a.length - 1) + } + + def println(ar: Array[Int]) { + def print1 = { + def iter(i: Int): String = + ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "") + if (ar.length == 0) "" else iter(0) + } + Console.println("[" + print1 + "]") + } + + val ar = Array(6, 2, 8, 5, 1) + println(ar) + sort(ar) + println(ar) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5279.check b/test/files/run/t5279.check new file mode 100644 index 0000000000..f599e28b8a --- /dev/null +++ b/test/files/run/t5279.check @@ -0,0 +1 @@ +10 diff --git a/test/files/run/t5279.scala b/test/files/run/t5279.scala new file mode 100644 index 0000000000..39e7dd2c66 --- /dev/null +++ b/test/files/run/t5279.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + println(new Integer(10)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5415.check b/test/files/run/t5415.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/t5415.scala b/test/files/run/t5415.scala new file mode 100644 index 0000000000..3db356da86 --- /dev/null +++ b/test/files/run/t5415.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import scala.reflect.runtime.Mirror.ToolBox + +object Test extends App{ + case class Queryable2[T]() { def filter(predicate: T => Boolean) = ??? 
} + trait CoffeesTable{ def sales : Int } + val q = Queryable2[CoffeesTable]() + val code = scala.reflect.Code.lift{q.filter(_.sales > 5)} + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) +} diff --git a/test/pending/run/reify_closure1.check b/test/pending/run/reify_closure1.check deleted file mode 100644 index b2f7f08c17..0000000000 --- a/test/pending/run/reify_closure1.check +++ /dev/null @@ -1,2 +0,0 @@ -10 -10 diff --git a/test/pending/run/reify_closure1.scala b/test/pending/run/reify_closure1.scala deleted file mode 100644 index 825a38dc1d..0000000000 --- a/test/pending/run/reify_closure1.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - def foo[T](ys: List[T]): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { - x - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println(foo(List(1, 2, 3))(10)) - println(foo(List(1, 2, 3, 4))(10)) -} diff --git a/test/pending/run/reify_closure2a.check b/test/pending/run/reify_closure2a.check deleted file mode 100644 index c1f3abd7e6..0000000000 --- a/test/pending/run/reify_closure2a.check +++ /dev/null @@ -1,2 +0,0 @@ -11 -12 diff --git a/test/pending/run/reify_closure2a.scala b/test/pending/run/reify_closure2a.scala deleted file mode 100644 index b88bec005d..0000000000 --- a/test/pending/run/reify_closure2a.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - def foo(y: Int): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { - x + y - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println(foo(1)(10)) - println(foo(2)(10)) -} diff --git a/test/pending/run/reify_closure3a.check b/test/pending/run/reify_closure3a.check deleted file mode 100644 index c1f3abd7e6..0000000000 --- a/test/pending/run/reify_closure3a.check +++ /dev/null @@ -1,2 +0,0 @@ -11 -12 diff --git a/test/pending/run/reify_closure3a.scala b/test/pending/run/reify_closure3a.scala deleted file mode 100644 index 6414fa58a3..0000000000 --- a/test/pending/run/reify_closure3a.scala +++ /dev/null @@ -1,22 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - def foo(y: Int): Int => Int = { - def y1 = y - - val fun: reflect.Code[Int => Int] = x => { - x + y1 - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println(foo(1)(10)) - println(foo(2)(10)) -} diff --git a/test/pending/run/reify_closure4a.check b/test/pending/run/reify_closure4a.check deleted file mode 100644 index c1f3abd7e6..0000000000 --- a/test/pending/run/reify_closure4a.check +++ /dev/null @@ -1,2 +0,0 @@ -11 -12 diff --git a/test/pending/run/reify_closure4a.scala b/test/pending/run/reify_closure4a.scala deleted file mode 100644 index 99e9d82706..0000000000 --- a/test/pending/run/reify_closure4a.scala +++ /dev/null @@ 
-1,22 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - def foo(y: Int): Int => Int = { - val y1 = y - - val fun: reflect.Code[Int => Int] = x => { - x + y1 - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println(foo(1)(10)) - println(foo(2)(10)) -} diff --git a/test/pending/run/reify_closure5a.check b/test/pending/run/reify_closure5a.check deleted file mode 100644 index df9e19c591..0000000000 --- a/test/pending/run/reify_closure5a.check +++ /dev/null @@ -1,2 +0,0 @@ -13 -14 diff --git a/test/pending/run/reify_closure5a.scala b/test/pending/run/reify_closure5a.scala deleted file mode 100644 index 0ac53d5479..0000000000 --- a/test/pending/run/reify_closure5a.scala +++ /dev/null @@ -1,20 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - def foo[T](ys: List[T]): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { - x + ys.length - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println(foo(List(1, 2, 3))(10)) - println(foo(List(1, 2, 3, 4))(10)) -} diff --git a/test/pending/run/reify_closure6.check b/test/pending/run/reify_closure6.check deleted file mode 100644 index e521ea874d..0000000000 --- a/test/pending/run/reify_closure6.check +++ /dev/null @@ -1,7 +0,0 @@ -q = 1 -y = 1 -first invocation = 15 -q = 2 -y = 1 -second invocation = 17 -q after second invocation = 2 diff --git a/test/pending/run/reify_closure6.scala b/test/pending/run/reify_closure6.scala deleted file mode 100644 index 43ddfde28d..0000000000 --- a/test/pending/run/reify_closure6.scala +++ /dev/null @@ -1,28 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - var q = 0 - def foo[T](ys: List[T]): Int => Int = { - val z = 1 - var y = 0 - val fun: reflect.Code[Int => Int] = x => { - y += 1 - q += 1 - println("q = " + q) - println("y = " + y) - x + ys.length * z + q + y - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - dyn.asInstanceOf[Int => Int] - } - - println("first invocation = " + foo(List(1, 2, 3))(10)) - println("second invocation = " + foo(List(1, 2, 3, 4))(10)) - println("q after second invocation = " + q) -} diff --git a/test/pending/run/reify_closure7.check b/test/pending/run/reify_closure7.check deleted file mode 100644 index bf58b52bce..0000000000 --- a/test/pending/run/reify_closure7.check +++ /dev/null @@ -1,6 +0,0 @@ -q = 1 -y = 1 -first invocation = 15 -q = 2 -y = 2 -second invocation = 17 diff --git a/test/pending/run/reify_closure7.scala b/test/pending/run/reify_closure7.scala deleted file mode 100644 index 8933df23fa..0000000000 --- a/test/pending/run/reify_closure7.scala +++ /dev/null @@ -1,32 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - var q = 0 - var clo: Int => Int = null - def foo[T](ys: List[T]): Int => Int = { - val z = 1 - var y = 0 - val fun: reflect.Code[Int => Int] = 
x => { - y += 1 - q += 1 - println("q = " + q) - println("y = " + y) - x + ys.length * z + q + y - } - - if (clo == null) { - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) - clo = dyn.asInstanceOf[Int => Int] - } - - clo - } - - println("first invocation = " + foo(List(1, 2, 3))(10)) - println("second invocation = " + foo(List(1, 2, 3, 4))(10)) -} diff --git a/test/pending/run/reify_closure8a.check b/test/pending/run/reify_closure8a.check deleted file mode 100644 index 9a037142aa..0000000000 --- a/test/pending/run/reify_closure8a.check +++ /dev/null @@ -1 +0,0 @@ -10 \ No newline at end of file diff --git a/test/pending/run/reify_closure8a.scala b/test/pending/run/reify_closure8a.scala deleted file mode 100644 index 5e54bfc8c7..0000000000 --- a/test/pending/run/reify_closure8a.scala +++ /dev/null @@ -1,17 +0,0 @@ -import scala.reflect.Code._ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - class Foo(val y: Int) { - def fun = lift{y} - } - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(10).fun.tree) - val dyn = toolbox.runExpr(ttree) - val foo = dyn.asInstanceOf[Int] - println(foo) -} diff --git a/test/pending/run/reify_closures10.check b/test/pending/run/reify_closures10.check deleted file mode 100644 index fd3c81a4d7..0000000000 --- a/test/pending/run/reify_closures10.check +++ /dev/null @@ -1,2 +0,0 @@ -5 -5 diff --git a/test/pending/run/reify_closures10.scala b/test/pending/run/reify_closures10.scala deleted file mode 100644 index d0f895ae4d..0000000000 --- a/test/pending/run/reify_closures10.scala +++ /dev/null @@ -1,15 +0,0 @@ -import scala.reflect.Code._ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val x = 2 - val y = 3 - val code = lift{println(x + y); x + y} - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - println(toolbox.runExpr(ttree)) -} diff --git a/test/pending/run/reify_implicits.check b/test/pending/run/reify_implicits.check deleted file mode 100644 index e3aeb20f6b..0000000000 --- a/test/pending/run/reify_implicits.check +++ /dev/null @@ -1 +0,0 @@ -x = List(1, 2, 3, 4) diff --git a/test/pending/run/reify_implicits.scala b/test/pending/run/reify_implicits.scala deleted file mode 100644 index a15cef9c97..0000000000 --- a/test/pending/run/reify_implicits.scala +++ /dev/null @@ -1,21 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - implicit def arrayWrapper[A : ClassManifest](x: Array[A]) = - new { - def sort(p: (A, A) => Boolean) = { - util.Sorting.stableSort(x, p); x - } - } - val x = Array(2, 3, 1, 4) - println("x = "+ x.sort((x: Int, y: Int) => x < y).toList) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_sort.check b/test/pending/run/reify_sort.check deleted file mode 100644 index 375536cc29..0000000000 --- a/test/pending/run/reify_sort.check +++ /dev/null @@ -1,2 +0,0 @@ -[6,2,8,5,1] -[1,2,5,6,8] diff --git 
a/test/pending/run/reify_sort.scala b/test/pending/run/reify_sort.scala deleted file mode 100644 index 42991fe5d2..0000000000 --- a/test/pending/run/reify_sort.scala +++ /dev/null @@ -1,57 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - /** Nested methods can use and even update everything - * visible in their scope (including local variables or - * arguments of enclosing methods). - */ - def sort(a: Array[Int]) { - - def swap(i: Int, j: Int) { - val t = a(i); a(i) = a(j); a(j) = t - } - - def sort1(l: Int, r: Int) { - val pivot = a((l + r) / 2) - var i = l - var j = r - while (i <= j) { - while (a(i) < pivot) i += 1 - while (a(j) > pivot) j -= 1 - if (i <= j) { - swap(i, j) - i += 1 - j -= 1 - } - } - if (l < j) sort1(l, j) - if (j < r) sort1(i, r) - } - - if (a.length > 0) - sort1(0, a.length - 1) - } - - def println(ar: Array[Int]) { - def print1 = { - def iter(i: Int): String = - ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "") - if (ar.length == 0) "" else iter(0) - } - Console.println("[" + print1 + "]") - } - - val ar = Array(6, 2, 8, 5, 1) - println(ar) - sort(ar) - println(ar) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_this.check b/test/pending/run/reify_this.check deleted file mode 100644 index af3d0652a9..0000000000 --- a/test/pending/run/reify_this.check +++ /dev/null @@ -1,5 +0,0 @@ -foo -false -2 -bar -2 \ No newline at end of file diff --git a/test/pending/run/reify_this.scala b/test/pending/run/reify_this.scala deleted file mode 100644 index 38ef72b6eb..0000000000 --- a/test/pending/run/reify_this.scala +++ /dev/null @@ -1,31 +0,0 @@ -import scala.reflect._ -import scala.reflect.Code._ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -trait Eval { - def eval(code: Code[_]): Any = eval(code.tree) - - def eval(tree: Tree): Any = { - val settings = new Settings - val reporter = new ConsoleReporter(settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(tree) - toolbox.runExpr(ttree) - } -} - -object Test extends App with Eval { - // select a value from package - eval(lift{println("foo")}) - eval(lift{println((new Object).toString == (new Object).toString)}) - - // select a type from package - eval(lift{val x: Any = 2; println(x)}) - eval(lift{val x: Object = "bar"; println(x)}) - - // select a value from module - val x = 2 - eval(lift{println(x)}) -} diff --git a/test/pending/run/t5274_2.check b/test/pending/run/t5274_2.check deleted file mode 100644 index 375536cc29..0000000000 --- a/test/pending/run/t5274_2.check +++ /dev/null @@ -1,2 +0,0 @@ -[6,2,8,5,1] -[1,2,5,6,8] diff --git a/test/pending/run/t5274_2.scala b/test/pending/run/t5274_2.scala deleted file mode 100644 index 42991fe5d2..0000000000 --- a/test/pending/run/t5274_2.scala +++ /dev/null @@ -1,57 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - /** Nested methods can use and even update everything - * visible in their scope (including local variables or - * arguments of enclosing methods). 
- */ - def sort(a: Array[Int]) { - - def swap(i: Int, j: Int) { - val t = a(i); a(i) = a(j); a(j) = t - } - - def sort1(l: Int, r: Int) { - val pivot = a((l + r) / 2) - var i = l - var j = r - while (i <= j) { - while (a(i) < pivot) i += 1 - while (a(j) > pivot) j -= 1 - if (i <= j) { - swap(i, j) - i += 1 - j -= 1 - } - } - if (l < j) sort1(l, j) - if (j < r) sort1(i, r) - } - - if (a.length > 0) - sort1(0, a.length - 1) - } - - def println(ar: Array[Int]) { - def print1 = { - def iter(i: Int): String = - ar(i) + (if (i < ar.length-1) "," + iter(i+1) else "") - if (ar.length == 0) "" else iter(0) - } - Console.println("[" + print1 + "]") - } - - val ar = Array(6, 2, 8, 5, 1) - println(ar) - sort(ar) - println(ar) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5279.check b/test/pending/run/t5279.check deleted file mode 100644 index f599e28b8a..0000000000 --- a/test/pending/run/t5279.check +++ /dev/null @@ -1 +0,0 @@ -10 diff --git a/test/pending/run/t5279.scala b/test/pending/run/t5279.scala deleted file mode 100644 index 39e7dd2c66..0000000000 --- a/test/pending/run/t5279.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - println(new Integer(10)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5415.check b/test/pending/run/t5415.check deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/test/pending/run/t5415.scala b/test/pending/run/t5415.scala deleted file mode 100644 index 3db356da86..0000000000 --- a/test/pending/run/t5415.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import scala.reflect.runtime.Mirror.ToolBox - -object Test extends App{ - case class Queryable2[T]() { def filter(predicate: T => Boolean) = ??? } - trait CoffeesTable{ def sales : Int } - val q = Queryable2[CoffeesTable]() - val code = scala.reflect.Code.lift{q.filter(_.sales > 5)} - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) -} -- cgit v1.2.3 From 2d9dfe3077fa2b43a336548cad98a522215c52a9 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Thu, 2 Feb 2012 19:59:12 +0100 Subject: Add parallel Ctrie parallel collection. 
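A minimal usage sketch of the new collection (illustrative only; the object and
value names below are not part of this patch). Bulk operations work on a
read-only snapshot of the underlying Ctrie, so they can proceed while other
threads keep updating the map:

  import scala.collection.parallel.mutable.ParCtrie

  object ParCtrieExample {
    def main(args: Array[String]): Unit = {
      val pc = new ParCtrie[Int, Int]()            // backed by a concurrent Ctrie
      for (i <- 0 until 10000) pc.put(i, i * i)    // updates are thread-safe

      // parallel bulk operation: the splitter iterates a read-only snapshot,
      // so concurrent writers cannot disturb it
      val sum = pc.map({ case (_, v) => v.toLong }).sum
      println(sum)
    }
  }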
--- src/library/scala/collection/mutable/Ctrie.scala | 8 +- .../collection/parallel/ParIterableLike.scala | 2 +- .../collection/parallel/mutable/ParCtrie.scala | 139 +++++++++++++++++++++ .../collection/parallel/mutable/ParHashMap.scala | 1 - .../scala/collection/parallel/package.scala | 11 +- 5 files changed, 151 insertions(+), 10 deletions(-) create mode 100644 src/library/scala/collection/parallel/mutable/ParCtrie.scala diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index 84cceb44eb..e1a72d9511 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -844,7 +844,7 @@ object Ctrie extends MutableMapFactory[Ctrie] { } -private[mutable] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { +private[collection] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { var stack = new Array[Array[BasicNode]](7) var stackpos = new Array[Int](7) var depth = -1 @@ -910,10 +910,12 @@ private[mutable] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = } } else current = null + protected def newIterator(_ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_ct, _mustInit) + /** Returns a sequence of iterators over subsets of this iterator. * It's used to ease the implementation of splitters for a parallel version of the Ctrie. */ - protected def subdivide: Seq[Iterator[(K, V)]] = if (subiter ne null) { + protected def subdivide(): Seq[Iterator[(K, V)]] = if (subiter ne null) { // the case where an LNode is being iterated val it = subiter subiter = null @@ -927,7 +929,7 @@ private[mutable] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) stack(d) = arr1 stackpos(d) = -1 - val it = new CtrieIterator[K, V](ct, false) + val it = newIterator(ct, false) it.stack(0) = arr2 it.stackpos(0) = -1 it.depth = 0 diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index b24497371d..32e0e8a8ed 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -451,7 +451,7 @@ self: ParIterableLike[T, Repr, Sequential] => reduce((x, y) => if (cmp.lteq(f(x), f(y))) x else y) } - + def map[S, That](f: T => S)(implicit bf: CanBuildFrom[Repr, S, That]): That = if (bf(repr).isCombiner) { executeAndWaitResult(new Map[S, That](f, combinerFactory(() => bf(repr).asCombiner), splitter) mapResult { _.result }) } else seq.map(f)(bf2seq(bf)) diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala new file mode 100644 index 0000000000..d8c060e719 --- /dev/null +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -0,0 +1,139 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.parallel.mutable + + + +import scala.collection.generic._ +import scala.collection.parallel.Combiner +import scala.collection.parallel.IterableSplitter +import scala.collection.mutable.Ctrie +import scala.collection.mutable.CtrieIterator + + + +/** Parallel Ctrie collection. 
+ * + * It has its bulk operations parallelized, but uses the snapshot operation + * to create the splitter. This means that parallel bulk operations can be + * called concurrently with the modifications. + * + * @author Aleksandar Prokopec + * @since 2.10 + */ +final class ParCtrie[K, V] private[mutable] (private val ctrie: Ctrie[K, V]) +extends ParMap[K, V] + with GenericParMapTemplate[K, V, ParCtrie] + with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]] + with ParCtrieCombiner[K, V] + with Serializable +{ + + def this() = this(new Ctrie) + + override def mapCompanion: GenericParMapCompanion[ParCtrie] = ParCtrie + + override def empty: ParCtrie[K, V] = ParCtrie.empty + + protected[this] override def newCombiner = ParCtrie.newCombiner + + override def seq = ctrie + + def splitter = new ParCtrieSplitter(ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true) + + override def size = ctrie.size + + override def clear() = ctrie.clear() + + def result = this + + def get(key: K): Option[V] = ctrie.get(key) + + def put(key: K, value: V): Option[V] = ctrie.put(key, value) + + def update(key: K, value: V): Unit = ctrie.update(key, value) + + def remove(key: K): Option[V] = ctrie.remove(key) + + def +=(kv: (K, V)): this.type = { + ctrie.+=(kv) + this + } + + def -=(key: K): this.type = { + ctrie.-=(key) + this + } + + override def stringPrefix = "ParCtrie" + +} + + +private[collection] class ParCtrieSplitter[K, V](ct: Ctrie[K, V], mustInit: Boolean) +extends CtrieIterator[K, V](ct, mustInit) + with IterableSplitter[(K, V)] +{ + // only evaluated if `remaining` is invoked (which is not used by most tasks) + lazy val totalsize = ct.iterator.size // TODO improve to lazily compute sizes + var iterated = 0 + + protected override def newIterator(_ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_ct, _mustInit) + + def dup = null // TODO necessary for views + + override def next() = { + iterated += 1 + super.next() + } + + def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] + + def remaining: Int = totalsize - iterated +} + + +/** Only used within the `ParCtrie`. 
*/ +private[mutable] trait ParCtrieCombiner[K, V] extends Combiner[(K, V), ParCtrie[K, V]] { + + def combine[N <: (K, V), NewTo >: ParCtrie[K, V]](other: Combiner[N, NewTo]): Combiner[N, NewTo] = if (this eq other) this else { + throw new UnsupportedOperationException("This shouldn't have been called in the first place.") + + val thiz = this.asInstanceOf[ParCtrie[K, V]] + val that = other.asInstanceOf[ParCtrie[K, V]] + val result = new ParCtrie[K, V] + + result ++= thiz.iterator + result ++= that.iterator + + result + } + + override def canBeShared = true + +} + + +object ParCtrie extends ParMapFactory[ParCtrie] { + + def empty[K, V]: ParCtrie[K, V] = new ParCtrie[K, V] + + def newCombiner[K, V]: Combiner[(K, V), ParCtrie[K, V]] = new ParCtrie[K, V] + + implicit def canBuildFrom[K, V]: CanCombineFrom[Coll, (K, V), ParCtrie[K, V]] = new CanCombineFromMap[K, V] + +} + + + + + + + + diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 3b4d3dc0b0..15ffd3fdd2 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -12,7 +12,6 @@ package mutable - import collection.generic._ import collection.mutable.DefaultEntry import collection.mutable.HashEntry diff --git a/src/library/scala/collection/parallel/package.scala b/src/library/scala/collection/parallel/package.scala index cdb9944fdc..8f19d0ecdb 100644 --- a/src/library/scala/collection/parallel/package.scala +++ b/src/library/scala/collection/parallel/package.scala @@ -196,22 +196,23 @@ package parallel { * the receiver (which will be the return value). */ private[parallel] abstract class BucketCombiner[-Elem, +To, Buck, +CombinerType <: BucketCombiner[Elem, To, Buck, CombinerType]] - (private val bucketnumber: Int) + (private val bucketnumber: Int) extends Combiner[Elem, To] { //self: EnvironmentPassingCombiner[Elem, To] => protected var buckets: Array[UnrolledBuffer[Buck]] @uncheckedVariance = new Array[UnrolledBuffer[Buck]](bucketnumber) protected var sz: Int = 0 - + def size = sz - + def clear() = { buckets = new Array[UnrolledBuffer[Buck]](bucketnumber) sz = 0 } - + def beforeCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} + def afterCombine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]) {} - + def combine[N <: Elem, NewTo >: To](other: Combiner[N, NewTo]): Combiner[N, NewTo] = { if (this eq other) this else other match { -- cgit v1.2.3 From 6f89da9e55315a2299ae8c4ab8c772936b862a85 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 26 Dec 2011 19:07:21 +0100 Subject: [vpm] factored out reusing treemakers (used by CSE) before, we were mutating treemakers in-place when they were reused no more mutation, and CSE is now self-contained interestingly, we were considering all FunTreeMakers as potentially reused, but only CondTreeMakers ever did anything with that flag should be clearer now that only those are ever reused simplified substonly treemaker a bit overall cleanup to prepare for switching to new-style detection of MatchStrategy delaying wrapping in function to simplify optimizing codegen logic --- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 238 +++++++++++---------- 1 file changed, 128 insertions(+), 110 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index b1e02cb062..49786813e8 100644 --- 
a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -32,9 +32,6 @@ import Flags.{ CASE => _, _ } d => body)))))(scrut) TODO: - - optimizer loops on virtpatmat compiler? - - - don't orElse a failure case at the end if there's a default case - implement spec more closely (see TODO's below) - fix inlining of methods in nested objects @@ -139,6 +136,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // must use type `tp`, which is provided by extractor's result, not the type expected by binder, // as b.info may be based on a Typed type ascription, which has not been taken into account yet by the translation // (it will later result in a type test when `tp` is not a subtype of `b.info`) + // TODO: can we simplify this, together with the Bound case? (extractor.subPatBinders, extractor.subPatTypes).zipped foreach { case (b, tp) => b setInfo tp } // println("changing "+ b +" : "+ b.info +" -> "+ tp); // println("translateExtractorPattern checking parameter type: "+ (patBinder, patBinder.info.widen, extractor.paramType, patBinder.info.widen <:< extractor.paramType)) @@ -215,12 +213,8 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => and it binds the variable name to that value. **/ case Bound(subpatBinder, p) => - // TreeMaker with empty list of trees only performs the substitution subpatBinder --> patBinder - // println("rebind "+ subpatBinder +" to "+ patBinder) - withSubPats(List(SubstOnlyTreeMaker(Substitution(subpatBinder, CODE.REF(patBinder)))), - // the symbols are markers that may be used to refer to the result of the extractor in which the corresponding tree is nested - // it's the responsibility of the treemaker to replace this symbol by a reference that - // selects that result on the function symbol of the flatMap call that binds to the result of this extractor + // replace subpatBinder by patBinder (as if the Bind was not there) + withSubPats(List(SubstOnlyTreeMaker(subpatBinder, patBinder)), // must be patBinder, as subpatBinder has the wrong info: even if the bind assumes a better type, this is not guaranteed until we cast (patBinder, p) ) @@ -651,6 +645,9 @@ defined class Foo */ lazy val optimizingCodeGen = matchingMonadType.typeSymbol eq OptionClass abstract class TreeMaker { + /** captures the scope and the value of the bindings in patterns + * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed) + */ def substitution: Substitution = if (currSub eq null) localSubstitution else currSub @@ -668,7 +665,6 @@ defined class Foo */ // build Tree that chains `next` after the current extractor def chainBefore(next: Tree, pt: Type): Tree - def treesToHoist: List[Tree] = Nil } case class TrivialTreeMaker(tree: Tree) extends TreeMaker { @@ -682,20 +678,13 @@ defined class Foo */ atPos(body.pos)(substitution(pmgen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here } - case class SubstOnlyTreeMaker(localSubstitution: Substitution) extends TreeMaker { + case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker { + val localSubstitution = Substitution(prevBinder, CODE.REF(nextBinder)) def chainBefore(next: Tree, pt: Type): Tree = substitution(next) } abstract class FunTreeMaker extends TreeMaker { val nextBinder: Symbol - - // for CSE (used iff optimizingCodeGen) - // TODO: factor this out -- don't mutate treemakers - var reused: Boolean 
= false - def reusedBinders: List[Symbol] = Nil - override def treesToHoist: List[Tree] = { import CODE._ - reusedBinders map { b => VAL(b) === pmgen.mkZero(b.info) } - } } abstract class FreshFunTreeMaker extends FunTreeMaker { @@ -706,44 +695,12 @@ defined class Foo */ lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) } - // TODO: factor out optimization-specific stuff into codegen - abstract class CondTreeMaker extends FreshFunTreeMaker { import CODE._ + abstract class CondTreeMaker extends FreshFunTreeMaker { val cond: Tree val res: Tree - // for CSE (used iff optimizingCodeGen) - // must set reused before! - override lazy val reusedBinders = if(reused) List(freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE, nextBinder setFlag MUTABLE) else Nil - def storedCond = reusedBinders(0) - def storedRes = reusedBinders(1) - def chainBefore(next: Tree, pt: Type): Tree = - if (!reused) - atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next))) - else { // for CSE (used iff optimizingCodeGen) - IF (cond) THEN BLOCK( - storedCond === TRUE, - storedRes === res, - substitution(next).duplicate // TODO: finer-grained dup'ing - ) ELSE pmgen.zero - } - } - - // for CSE (used iff optimizingCodeGen) - case class ReusingCondTreeMaker(dropped_priors: List[(TreeMaker, Option[TreeMaker])]) extends TreeMaker { import CODE._ - lazy val localSubstitution = { - val (from, to) = dropped_priors.collect {case (dropped: CondTreeMaker, Some(prior: CondTreeMaker)) => (dropped.nextBinder, REF(prior.storedRes))}.unzip - val oldSubs = dropped_priors.collect {case (dropped: TreeMaker, _) => dropped.substitution} - oldSubs.foldLeft(Substitution(from, to))(_ >> _) - } - - def chainBefore(next: Tree, pt: Type): Tree = { - val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: CondTreeMaker)) => ctm}.get.storedCond) - - IF (cond) THEN BLOCK( - substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) - ) ELSE pmgen.zero - } + atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next))) } /** @@ -754,12 +711,13 @@ defined class Foo */ * in this function's body, and all the subsequent ones, references to the symbols in `from` will be replaced by the corresponding tree in `to` */ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker { - def chainBefore(next: Tree, pt: Type): Tree = atPos(extractor.pos)( - if (extractorReturnsBoolean) pmgen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext(next))) - else pmgen.flatMap(extractor, pmgen.fun(nextBinder, substitution(condAndNext(next)))) - ) - - private def condAndNext(next: Tree): Tree = extraCond map (pmgen.condOptimized(_, next)) getOrElse next + def chainBefore(next: Tree, pt: Type): Tree = { + val condAndNext = extraCond map (pmgen.condOptimized(_, next)) getOrElse next + atPos(extractor.pos)( + if (extractorReturnsBoolean) pmgen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext)) + else pmgen.flatMap(extractor, nextBinder, substitution(condAndNext)) + ) + } override def toString = "X"+(extractor, nextBinder) } @@ -768,10 +726,7 @@ 
defined class Foo */ case class ProductExtractorTreeMaker(prevBinder: Symbol, extraCond: Option[Tree], localSubstitution: Substitution) extends TreeMaker { import CODE._ def chainBefore(next: Tree, pt: Type): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL - val cond = extraCond match { - case None => nullCheck - case Some(c) => nullCheck AND c - } + val cond = extraCond map (nullCheck AND _) getOrElse nullCheck pmgen.condOptimized(cond, substitution(next)) } @@ -957,6 +912,8 @@ defined class Foo */ override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id } +//// CSE + /** a flow-sensitive, generalised, common sub-expression elimination * reuse knowledge from performed tests * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality) @@ -1031,7 +988,7 @@ defined class Foo */ | GuardTreeMaker(_) | ProductExtractorTreeMaker(_, Some(_), _) => Havoc case AlternativesTreeMaker(_, _, _) => Havoc // TODO: can do better here - case SubstOnlyTreeMaker(_) => Top + case SubstOnlyTreeMaker(_, _) => Top case BodyTreeMaker(_, _) => Havoc }, tm) } @@ -1067,7 +1024,11 @@ defined class Foo */ // then, collapse these contiguous sequences of reusing tests // store the result of the final test and the intermediate results in hoisted mutable variables (TODO: optimize: don't store intermediate results that aren't used) // replace each reference to a variable originally bound by a collapsed test by a reference to the hoisted variable - testss map { tests => + val reused = new collection.mutable.HashMap[TreeMaker, ReusedCondTreeMaker] + var okToCall = false + val reusedOrOrig = (tm: TreeMaker) => {assert(okToCall); reused.getOrElse(tm, tm)} + + val res = testss map { tests => var currDeps = Set[Cond]() val (sharedPrefix, suffix) = tests span { test => (test.cond eq Top) || (for( @@ -1079,18 +1040,66 @@ defined class Foo */ } val collapsedTreeMakers = if (sharedPrefix.nonEmpty) { // even sharing prefixes of length 1 brings some benefit (overhead-percentage for compiler: 26->24%, lib: 19->16%) - for (test <- sharedPrefix; reusedTest <- test.reuses; if reusedTest.treeMaker.isInstanceOf[FunTreeMaker]) - reusedTest.treeMaker.asInstanceOf[FunTreeMaker].reused = true + for (test <- sharedPrefix; reusedTest <- test.reuses) reusedTest.treeMaker match { + case reusedCTM: CondTreeMaker => reused(reusedCTM) = ReusedCondTreeMaker(reusedCTM) + case _ => + } + // println("sharedPrefix: "+ sharedPrefix) for (lastShared <- sharedPrefix.reverse.dropWhile(_.cond eq Top).headOption; lastReused <- lastShared.reuses) - yield ReusingCondTreeMaker(sharedPrefix map (t => (t.treeMaker, t.reuses map (_.treeMaker)))) :: suffix.map(_.treeMaker) + yield ReusingCondTreeMaker(sharedPrefix, reusedOrOrig) :: suffix.map(_.treeMaker) } else None collapsedTreeMakers getOrElse tests.map(_.treeMaker) // sharedPrefix need not be empty (but it only contains Top-tests, which are dropped above) } + okToCall = true // TODO: remove (debugging) + + res mapConserve (_ mapConserve reusedOrOrig) } + object ReusedCondTreeMaker { + def apply(orig: CondTreeMaker) = new ReusedCondTreeMaker(orig.prevBinder, orig.nextBinder, orig.cond, orig.res, orig.pos) + } + class ReusedCondTreeMaker(prevBinder: Symbol, val nextBinder: Symbol, cond: Tree, res: Tree, pos: Position) extends TreeMaker { import CODE._ + lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) + lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE + lazy val treesToHoist: 
List[Tree] = { + nextBinder setFlag MUTABLE + List(storedCond, nextBinder) map { b => VAL(b) === pmgen.mkZero(b.info) } + } + + // TODO: finer-grained duplication + def chainBefore(next: Tree, pt: Type): Tree = + atPos(pos)(pmgen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) + } + + case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ + lazy val dropped_priors = sharedPrefix map (t => (toReused(t.treeMaker), t.reuses map (test => toReused(test.treeMaker)))) + lazy val localSubstitution = { + val (from, to) = dropped_priors.collect { + case (dropped: CondTreeMaker, Some(prior: ReusedCondTreeMaker)) => + (dropped.nextBinder, REF(prior.nextBinder)) + }.unzip + val oldSubs = dropped_priors.collect { + case (dropped: TreeMaker, _) => + dropped.substitution + } + oldSubs.foldLeft(Substitution(from, to))(_ >> _) + } + + def chainBefore(next: Tree, pt: Type): Tree = { + val cond = REF(dropped_priors.reverse.collectFirst{case (_, Some(ctm: ReusedCondTreeMaker)) => ctm}.get.storedCond) + + IF (cond) THEN BLOCK( + substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) + ) ELSE pmgen.zero + } + } + + +//// DCE + // TODO: non-trivial dead-code elimination // e.g., the following match should compile to a simple instanceof: // case class Ident(name: String) @@ -1101,18 +1110,7 @@ defined class Foo */ } - def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) - - // a foldLeft to accumulate the localSubstitution left-to-right - // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution - def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { - var accumSubst: Substitution = initial - treeMakers foreach { maker => - maker incorporateOuterSubstitution accumSubst - accumSubst = maker.substitution - } - removeSubstOnly(treeMakers) - } +//// SWITCHES object SwitchablePattern { def unapply(pat: Tree) = pat match { case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches @@ -1205,6 +1203,20 @@ defined class Foo */ def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt) + + def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) + + // a foldLeft to accumulate the localSubstitution left-to-right + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { + var accumSubst: Substitution = initial + treeMakers foreach { maker => + maker incorporateOuterSubstitution accumSubst + accumSubst = maker.substitution + } + removeSubstOnly(treeMakers) + } + // calls propagateSubstitution on the treemakers def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){ val casesUnOpt = casesRaw map 
(propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them @@ -1232,12 +1244,15 @@ defined class Foo */ val combinedCases = cases.map(combineExtractors(_, pt)).reduceLeft(pmgen.typedOrElse(optPt)) - toHoist = (for (treeMakers <- cases; tm <- treeMakers; hoisted <- tm.treesToHoist) yield hoisted).toList + toHoist = ( + for (treeMakers <- cases) + yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} + ).flatten.flatten.toList - (pmgen.fun(scrutSym, combinedCases), hasDefault) + (combinedCases, hasDefault) } else (pmgen.zero, false) - val expr = pmgen.runOrElse(scrut, matcher, scrutSym.info, if (isFullyDefined(pt)) pt else NoType, hasDefault) + val expr = pmgen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault) if (toHoist isEmpty) expr else Block(toHoist, expr) } @@ -1248,8 +1263,6 @@ defined class Foo */ def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree = treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt)) - - // TODO: do this during tree construction, but that will require tracking the current owner in treemakers // TODO: assign more fine-grained positions // fixes symbol nesting, assigns positions @@ -1328,9 +1341,10 @@ defined class Foo */ // codegen relevant to the structure of the translation (how extractors are combined) trait AbsCodeGen { import CODE.UNIT - def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree - def flatMap(a: Tree, b: Tree): Tree + def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree + def flatMap(prev: Tree, b: Symbol, next: Tree): Tree def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree + def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree def flatMapGuard(cond: Tree, next: Tree): Tree def fun(arg: Symbol, body: Tree): Tree def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree @@ -1359,7 +1373,8 @@ defined class Foo */ trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => // methods in MatchingStrategy (the monad companion) -- used directly in translation - def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean): Tree = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutTp, resTp) APPLY (scrut) APPLY (matcher) // matchingStrategy.runOrElse(scrut)(matcher) + def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree + = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutSym.info, resTp) APPLY (scrut) APPLY (fun(scrutSym, matcher)) // matchingStrategy.runOrElse(scrut)(matcher) // *only* used to wrap the RHS of a body (isDefinedAt synthesis relies on this) def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (matchingStrategy DOT vpmName.one) (_asInstanceOf(res, bodyPt, force = true)) // matchingStrategy.one(res), like one, but blow this one away for isDefinedAt (since it's the RHS of a case) def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero @@ -1368,13 +1383,14 @@ defined class Foo */ trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => // methods in the monad instance -- used directly in translation - def flatMap(a: Tree, b: Tree): Tree = (a DOT vpmName.flatMap)(b) + def flatMap(prev: Tree, b: Symbol, next: Tree): 
Tree = (prev DOT vpmName.flatMap)(fun(b, next)) def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase) // TODO: the trees generated by flatMapCond and flatMapGuard may need to be distinguishable by exhaustivity checking -- they aren't right now def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, - nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), fun(nextBinder, next)) + nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next) def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, UnitClass.tpe), UnitClass.tpe, next) + def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = throw new UnsupportedOperationException("Can't optimize under user-defined monad.") } // when we know we're targetting Option, do some inlining the optimizer won't do @@ -1394,18 +1410,17 @@ defined class Foo */ @inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass) lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE - override def runOrElse(scrut: Tree, matcher: Tree, scrutTp: Type, resTp: Type, hasDefault: Boolean) = { - val Function(List(x: ValDef), body) = matcher + override def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = { matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245 BLOCK( - VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite... - VAL(x.symbol) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for x.symbol in body -- the owner structure is repaired by fixerUpper + VAL(zeroSym) === REF(NoneModule), // TODO: can we just get rid of explicitly emitted zero? don't know how to do that as a local rewrite... 
+ VAL(scrutSym) === scrut, // reuse the symbol of the function's argument to avoid creating a fresh one and substituting it for scrutSym in `matcher` -- the owner structure is repaired by fixerUpper VAL(matchRes) === mkZero(matchRes.info), // must cast to deal with GADT typing, hence the private mkZero above VAL(keepGoing) === TRUE, - body, + matcher, if(hasDefault) REF(matchRes) - else (IF (REF(keepGoing)) THEN MATCHERROR(REF(x.symbol)) ELSE REF(matchRes)) + else (IF (REF(keepGoing)) THEN MATCHERROR(REF(scrutSym)) ELSE REF(matchRes)) ) } @@ -1424,20 +1439,16 @@ defined class Foo */ // guard is only used by flatMapCond and flatMapGuard, which are overridden override def guard(c: Tree, then: Tree, tp: Type): Tree = throw new NotImplementedError("guard is never called by optimizing codegen") - override def flatMap(opt: Tree, fun: Tree): Tree = fun match { - case Function(List(x: ValDef), body) => - val tp = inMatchMonad(x.symbol.tpe) - val vs = freshSym(opt.pos, tp, "o") - val isEmpty = tp member vpmName.isEmpty - val get = tp member vpmName.get - val v = VAL(vs) === opt - - BLOCK( - v, - IF (vs DOT isEmpty) THEN zero ELSE typedSubst(body, List(x.symbol), List(vs DOT get)) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor) - ) - case _ => println("huh?") - (opt DOT vpmName.flatMap)(fun) + override def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = { + val tp = inMatchMonad(b.tpe) + val prevSym = freshSym(prev.pos, tp, "o") + val isEmpty = tp member vpmName.isEmpty + val get = tp member vpmName.get + + BLOCK( + VAL(prevSym) === prev, + IF (prevSym DOT isEmpty) THEN zero ELSE typedSubst(next, List(b), List(prevSym DOT get)) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor) + ) } override def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = { @@ -1453,6 +1464,13 @@ defined class Foo */ next ) ELSE zero + override def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = + IF (cond) THEN BLOCK( + condSym === TRUE, + nextBinder === res, + next + ) ELSE zero + override def flatMapGuard(guardTree: Tree, next: Tree): Tree = IF (guardTree) THEN next ELSE zero } -- cgit v1.2.3 From 03f00fe232c35189682341e39fac487ed2a70a8c Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 28 Dec 2011 19:06:49 +0100 Subject: [vpm] __match determines match semantics; virtualization determine match strategy by typing `__match` factored out the interface to generate code in this monad, cleaned up codegen a bit no longer solving a context bound to determine the match strategy and the monad's type constructor it's too expensive don't consider implicits looking for __match implicit search causes HUGE slowdowns -- now the overhead is about 4% compared to just assuming there's no __match in scope to support virtualization&staging, we use the type of `__match.one` as the prototype for how to wrap "pure" types and types "in the monad" pure types T are wrapped as P[T], and T goes into the monad as M[T], if one is defined as: def one[T](x: P[T]): M[T] for staging, P will typically be the Rep type constructor, and type M[T] = Rep[Option[T]] furthermore, naive codegen no longer supplies type information -- type inference will have to work it out optimized codegen still does, of course, and that's enough since we only bootstrap that way TODO: improve the test (currently the condition is not represented) --- 
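For reference, the shape assumed for a match strategy, mirroring the doc
comment added to PatMatVirtualiser below; P and M are recovered from the
signature of `one`, and the second object is what is assumed (and inlined by
the optimizing codegen) when no `__match` is found in scope:

  abstract class MatchStrategy[P[_], M[+_]] {
    // runs the matcher on the given input
    def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U]

    def zero: M[Nothing]
    def one[T](x: P[T]): M[T]
    def guard[T](cond: P[Boolean], then: => P[T]): M[T]
    def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt
  }

  object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] {
    def zero = None
    def one[T](x: T) = Some(x)
    // guard's return type must have the shape M[T]
    def guard[T](cond: Boolean, then: => T): Option[T] = if (cond) Some(then) else None
    def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x))
    def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty
  }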
.../tools/nsc/typechecker/PatMatVirtualiser.scala | 416 ++++++++++----------- src/library/scala/MatchingStrategy.scala | 27 -- test/files/run/virtpatmat_staging.check | 1 + test/files/run/virtpatmat_staging.flags | 1 + test/files/run/virtpatmat_staging.scala | 52 +++ 5 files changed, 252 insertions(+), 245 deletions(-) delete mode 100644 src/library/scala/MatchingStrategy.scala create mode 100644 test/files/run/virtpatmat_staging.check create mode 100644 test/files/run/virtpatmat_staging.flags create mode 100644 test/files/run/virtpatmat_staging.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 49786813e8..aef85206fa 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -16,26 +16,16 @@ import Flags.{ CASE => _, _ } * (lifting the body of the case into the monad using `one`). * * Cases are combined into a pattern match using the `orElse` combinator (the implicit failure case is expressed using the monad's `zero`). - * - * The monad `M` in which the pattern match is interpreted is determined by solving `implicitly[MatchingStrategy[M]]` for M. - * Predef provides the default, `OptionMatching` - - * Example translation: TODO - - scrut match { case Person(father@Person(_, fatherName), name) if fatherName == name => } - scrut match { case Person(father, name) => father match {case Person(_, fatherName) => }} - Person.unapply(scrut) >> ((father, name) => (Person.unapply(father) >> (_, fatherName) => check(fatherName == name) >> (_ => body))) - - (a => (Person.unapply(a).>>( - b => Person.unapply(b._1).>>( - c => check(c._2 == b._2).>>( - d => body)))))(scrut) - -TODO: - - implement spec more closely (see TODO's below) - - fix inlining of methods in nested objects + * TODO: + * - interaction with CPS + * - Array patterns + * - implement spec more closely (see TODO's) + * - DCE + * - use manifests for type testing + * * (longer-term) TODO: + * - user-defined unapplyProd * - recover GADT typing by locally inserting implicit witnesses to type equalities derived from the current case, and considering these witnesses during subtyping (?) 
* - recover exhaustivity and unreachability checking using a variation on the type-safe builder pattern */ @@ -43,26 +33,12 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => import global._ import definitions._ - class MatchTranslator(typer: Typer) extends MatchCodeGen { + class MatchTranslator(val typer: Typer) extends MatchCodeGen { def typed(tree: Tree, mode: Int, pt: Type): Tree = typer.typed(tree, mode, pt) // for MatchCodeGen -- imports don't provide implementations for abstract members import typer._ import typeDebug.{ ptTree, ptBlock, ptLine } - def solveContextBound(contextBoundTp: Type): (Tree, Type) = { - val solSym = NoSymbol.newTypeParameter(newTypeName("SolveImplicit$")) - val param = solSym.setInfo(contextBoundTp.typeSymbol.typeParams(0).info.cloneInfo(solSym)) // TypeBounds(NothingClass.typeConstructor, baseTp) - val pt = appliedType(contextBoundTp, List(param.tpeHK)) - val savedUndets = context.undetparams - - context.undetparams = param :: context.undetparams - val result = inferImplicit(EmptyTree, pt, false, false, context) - context.undetparams = savedUndets - - (result.tree, result.subst.to(result.subst.from indexOf param)) - } - - lazy val (matchingStrategy, matchingMonadType) = solveContextBound(MatchingStrategyClass.typeConstructor) /** Implement a pattern match by turning its cases (including the implicit failure case) * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. @@ -72,7 +48,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => * * NOTE: the resulting tree is not type checked, nor are nested pattern matches transformed * thus, you must typecheck the result (and that will in turn translate nested matches) - * this could probably optimized... (but note that the matchingStrategy must be solved for each nested patternmatch) + * this could probably optimized... (but note that the matchStrategy must be solved for each nested patternmatch) */ def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type): Tree = { // we don't transform after typers @@ -82,7 +58,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen)) - val scrutSym = freshSym(scrut.pos, scrutType) + val scrutSym = freshSym(scrut.pos, pureType(scrutType)) val okPt = repeatedToSeq(pt) // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, context.owner) @@ -260,7 +236,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => if (guard == EmptyTree) Nil else List(GuardTreeMaker(guard)) - // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by pmgen.one), + // TODO: 1) if we want to support a generalisation of Kotlin's patmat continue, must not hard-wire lifting into the monad (which is now done by codegen.one), // so that user can generate failure when needed -- use implicit conversion to lift into monad on-demand? 
// to enable this, probably need to move away from Option to a monad specific to pattern-match, // so that we can return Option's from a match without ambiguity whether this indicates failure in the monad, or just some result in the monad @@ -373,34 +349,32 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => protected lazy val expectedLength = lastIndexingBinder - firstIndexingBinder + 1 protected lazy val minLenToCheck = if(lastIsStar) 1 else 0 protected def seqTree(binder: Symbol) = tupleSel(binder)(firstIndexingBinder+1) - protected def tupleSel(binder: Symbol)(i: Int): Tree = pmgen.tupleSel(binder)(i) + protected def tupleSel(binder: Symbol)(i: Int): Tree = codegen.tupleSel(binder)(i) // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require isSeq protected def subPatRefsSeq(binder: Symbol): List[Tree] = { - // only relevant if isSeq: (here to avoid capturing too much in the returned closure) - val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder)) - val nbIndexingIndices = indexingIndices.length + val indexingIndices = (0 to (lastIndexingBinder-firstIndexingBinder)) + val nbIndexingIndices = indexingIndices.length - // this error is checked by checkStarPatOK - // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) + // this error-condition has already been checked by checkStarPatOK: + // if(isSeq) assert(firstIndexingBinder + nbIndexingIndices + (if(lastIsStar) 1 else 0) == nbSubPats, "(resultInMonad, ts, subPatTypes, subPats)= "+(resultInMonad, ts, subPatTypes, subPats)) // there are `firstIndexingBinder` non-seq tuple elements preceding the Seq (((1 to firstIndexingBinder) map tupleSel(binder)) ++ // then we have to index the binder that represents the sequence for the remaining subpatterns, except for... 
- (indexingIndices map pmgen.index(seqTree(binder))) ++ + (indexingIndices map codegen.index(seqTree(binder))) ++ // the last one -- if the last subpattern is a sequence wildcard: drop the prefix (indexed by the refs on the line above), return the remainder (if(!lastIsStar) Nil else List( if(nbIndexingIndices == 0) seqTree(binder) - else pmgen.drop(seqTree(binder))(nbIndexingIndices)))).toList + else codegen.drop(seqTree(binder))(nbIndexingIndices)))).toList } // the trees that select the subpatterns on the extractor's result, referenced by `binder` // require (nbSubPats > 0 && (!lastIsStar || isSeq)) - protected def subPatRefs(binder: Symbol): List[Tree] = { + protected def subPatRefs(binder: Symbol): List[Tree] = if (nbSubPats == 0) Nil else if (isSeq) subPatRefsSeq(binder) else ((1 to nbSubPats) map tupleSel(binder)).toList - } protected def lengthGuard(binder: Symbol): Option[Tree] = // no need to check unless it's an unapplySeq and the minimal length is non-trivially satisfied @@ -421,7 +395,9 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => } } - // TODO: to be called when there's a def unapplyProd(x: T): Product_N + // TODO: to be called when there's a def unapplyProd(x: T): U + // U must have N members _1,..., _N -- the _i are type checked, call their type Ti, + // // for now only used for case classes -- pretending there's an unapplyProd that's the identity (and don't call it) class ExtractorCallProd(fun: Tree, args: List[Tree]) extends ExtractorCall(args) { // TODO: fix the illegal type bound in pos/t602 -- type inference messes up before we get here: @@ -433,15 +409,15 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // private val extractorTp = if (wellKinded(fun.tpe)) fun.tpe else existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType) // println("ExtractorCallProd: "+ (fun.tpe, existentialAbstraction(origExtractorTp.typeParams, origExtractorTp.resultType))) // println("ExtractorCallProd: "+ (fun.tpe, args map (_.tpe))) - private def extractorTp = fun.tpe + private def constructorTp = fun.tpe def isTyped = fun.isTyped // to which type should the previous binder be casted? - def paramType = extractorTp.finalResultType + def paramType = constructorTp.finalResultType def isSeq: Boolean = rawSubPatTypes.nonEmpty && isRepeatedParamType(rawSubPatTypes.last) - protected def rawSubPatTypes = extractorTp.paramTypes + protected def rawSubPatTypes = constructorTp.paramTypes // binder has type paramType def treeMaker(binder: Symbol, pos: Position): TreeMaker = { @@ -450,31 +426,20 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => } /* TODO: remove special case when the following bug is fixed -scala> :paste -// Entering paste mode (ctrl-D to finish) - class Foo(x: Other) { x._1 } // BUG: can't refer to _1 if its defining class has not been type checked yet case class Other(y: String) - -// Exiting paste mode, now interpreting. 
- -:8: error: value _1 is not a member of Other - class Foo(x: Other) { x._1 } - ^ - -scala> case class Other(y: String) -defined class Other - -scala> class Foo(x: Other) { x._1 } -defined class Foo */ +-- this is ok: +case class Other(y: String) +class Foo(x: Other) { x._1 } // no error in this order +*/ override protected def tupleSel(binder: Symbol)(i: Int): Tree = { import CODE._ // reference the (i-1)th case accessor if it exists, otherwise the (i-1)th tuple component val caseAccs = binder.info.typeSymbol.caseFieldAccessors if (caseAccs isDefinedAt (i-1)) REF(binder) DOT caseAccs(i-1) - else pmgen.tupleSel(binder)(i) + else codegen.tupleSel(binder)(i) } - override def toString(): String = "case class "+ (if (extractorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args + override def toString(): String = "case class "+ (if (constructorTp eq null) fun else paramType.typeSymbol) +" with arguments "+ args } class ExtractorCallRegular(extractorCallIncludingDummy: Tree, args: List[Tree]) extends ExtractorCall(args) { @@ -489,7 +454,7 @@ defined class Foo */ def treeMaker(patBinderOrCasted: Symbol, pos: Position): TreeMaker = { // the extractor call (applied to the binder bound by the flatMap corresponding to the previous (i.e., enclosing/outer) pattern) val extractorApply = atPos(pos)(spliceApply(patBinderOrCasted)) - val binder = freshSym(pos, resultInMonad) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type + val binder = freshSym(pos, pureType(resultInMonad)) // can't simplify this when subPatBinders.isEmpty, since UnitClass.tpe is definitely wrong when isSeq, and resultInMonad should always be correct since it comes directly from the extractor's result type ExtractorTreeMaker(extractorApply, lengthGuard(binder), binder, Substitution(subPatBinders, subPatRefs(binder)))(resultType.typeSymbol == BooleanClass) } @@ -518,12 +483,7 @@ defined class Foo */ // turn an extractor's result type into something `monadTypeToSubPatTypesAndRefs` understands protected lazy val resultInMonad: Type = if(!hasLength(tpe.paramTypes, 1)) ErrorType else { if (resultType.typeSymbol == BooleanClass) UnitClass.tpe - else { - val monadArgs = resultType.baseType(matchingMonadType.typeSymbol).typeArgs - // assert(monadArgs.length == 1, "unhandled extractor type: "+ extractorTp) // TODO: overloaded unapply?? - if(monadArgs.length == 1) monadArgs(0) - else ErrorType - } + else matchMonadResult(resultType) } protected lazy val rawSubPatTypes = @@ -549,10 +509,10 @@ defined class Foo */ // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor? 
val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC - val outerCheck = (Select(pmgen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix + val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix // first check cond, since that should ensure we're not selecting outer on null - pmgen.and(cond, outerCheck) + codegen.and(cond, outerCheck) } else cond @@ -560,7 +520,7 @@ defined class Foo */ // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest) def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt) - def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(pmgen._isInstanceOf(binder, pt)) + def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(codegen._isInstanceOf(binder, pt)) /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms: - A reference to a class C, p.C, or T#C. @@ -592,7 +552,7 @@ defined class Foo */ // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null` // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false") def genEqualsAndInstanceOf(sym: Symbol): Tree - = pmgen._equals(REF(sym), patBinder) AND pmgen._isInstanceOf(patBinder, pt.widen) + = codegen._equals(REF(sym), patBinder) AND codegen._isInstanceOf(patBinder, pt.widen) def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe @@ -606,7 +566,7 @@ defined class Foo */ case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym) case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL - case ConstantType(const) => pmgen._equals(Literal(const), patBinder) + case ConstantType(const) => codegen._equals(Literal(const), patBinder) case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL) case _ => typeTest(patBinder, pt) } @@ -639,11 +599,92 @@ defined class Foo */ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// // the making of the trees /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + /** Interface with user-defined match monad? 
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below: + + type Matcher[P[_], M[+_], A] = { + def flatMap[B](f: P[A] => M[B]): M[B] + def orElse[B >: A](alternative: => M[B]): M[B] + } + + abstract class MatchStrategy[P[_], M[+_]] { + // runs the matcher on the given input + def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U] - trait TreeMakers { - def inMatchMonad(tp: Type): Type = appliedType(matchingMonadType, List(tp)) - lazy val optimizingCodeGen = matchingMonadType.typeSymbol eq OptionClass + def zero: M[Nothing] + def one[T](x: P[T]): M[T] + def guard[T](cond: P[Boolean], then: => P[T]): M[T] + def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt + } + + * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) + + + * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly) + + object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] { + def zero = None + def one[T](x: T) = Some(x) + // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted + def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None + def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) + def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty + } + + */ + trait MatchMonadInterface { import CODE._ + val typer: Typer + import typer._ + + object vpmName { + val one = newTermName("one") + val drop = newTermName("drop") + val flatMap = newTermName("flatMap") + val get = newTermName("get") + val guard = newTermName("guard") + val isEmpty = newTermName("isEmpty") + val orElse = newTermName("orElse") + val outer = newTermName("") + val runOrElse = newTermName("runOrElse") + val zero = newTermName("zero") + val __match = newTermName("__match") + + def counted(str: String, i: Int) = newTermName(str+i) + } + + final lazy val matchStrategy = // typing `__match` instead of just returning EmptyTree adds 4% to quick.comp.timer + newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + case SilentResultValue(ms) => ms + case _ => EmptyTree + } + + final def optimizingCodeGen: Boolean = matchStrategy eq EmptyTree + + def __match(n: Name): SelectStart = matchStrategy DOT n + + private lazy val oneSig: Type = + typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message + + final def inMatchMonad(tp: Type): Type = + if(optimizingCodeGen) optionType(tp) + else appliedType(oneSig, List(tp)).finalResultType + + private lazy val matchMonadSym = + if(optimizingCodeGen) OptionClass + else oneSig.finalResultType.typeSymbol + + final def matchMonadResult(tp: Type): Type = + tp.baseType(matchMonadSym).typeArgs match { + case arg :: Nil => arg + case _ => ErrorType + } + + final def pureType(tp: Type): Type = + if(optimizingCodeGen) tp + else appliedType(oneSig, List(tp)).paramTypes.head + } + trait TreeMakers extends MatchMonadInterface { abstract class TreeMaker { /** captures the scope and the value of the bindings in patterns * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed) @@ -675,7 +716,7 @@ defined class Foo */ case class BodyTreeMaker(body: Tree, matchPt: Type) extends TreeMaker { 
val localSubstitution: Substitution = EmptySubstitution def chainBefore(next: Tree, pt: Type): Tree = // assert(next eq EmptyTree) - atPos(body.pos)(substitution(pmgen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here + atPos(body.pos)(substitution(codegen.one(body, body.tpe, matchPt))) // since SubstOnly treemakers are dropped, need to do it here } case class SubstOnlyTreeMaker(prevBinder: Symbol, nextBinder: Symbol) extends TreeMaker { @@ -687,20 +728,18 @@ defined class Foo */ val nextBinder: Symbol } - abstract class FreshFunTreeMaker extends FunTreeMaker { + abstract class CondTreeMaker extends FunTreeMaker { val pos: Position val prevBinder: Symbol val nextBinderTp: Type - lazy val nextBinder = freshSym(pos, nextBinderTp) - lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) - } - - abstract class CondTreeMaker extends FreshFunTreeMaker { val cond: Tree val res: Tree + lazy val nextBinder = freshSym(pos, nextBinderTp) + lazy val localSubstitution = Substitution(List(prevBinder), List(CODE.REF(nextBinder))) + def chainBefore(next: Tree, pt: Type): Tree = - atPos(pos)(pmgen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next))) + atPos(pos)(codegen.flatMapCond(cond, res, nextBinder, nextBinderTp, substitution(next))) } /** @@ -712,10 +751,10 @@ defined class Foo */ */ case class ExtractorTreeMaker(extractor: Tree, extraCond: Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker { def chainBefore(next: Tree, pt: Type): Tree = { - val condAndNext = extraCond map (pmgen.condOptimized(_, next)) getOrElse next + val condAndNext = extraCond map (codegen.condOptimized(_, next)) getOrElse next atPos(extractor.pos)( - if (extractorReturnsBoolean) pmgen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext)) - else pmgen.flatMap(extractor, nextBinder, substitution(condAndNext)) + if (extractorReturnsBoolean) codegen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext)) + else codegen.flatMap(extractor, nextBinder, substitution(condAndNext)) ) } @@ -727,7 +766,7 @@ defined class Foo */ def chainBefore(next: Tree, pt: Type): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL val cond = extraCond map (nullCheck AND _) getOrElse nullCheck - pmgen.condOptimized(cond, substitution(next)) + codegen.condOptimized(cond, substitution(next)) } override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution) @@ -737,7 +776,7 @@ defined class Foo */ // need to substitute since binder may be used outside of the next extractor call (say, in the body of the case) case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker { val cond = typeTest(prevBinder, nextBinderTp) - val res = pmgen._asInstanceOf(prevBinder, nextBinderTp) + val res = codegen._asInstanceOf(prevBinder, nextBinderTp) override def toString = "TT"+(prevBinder, nextBinderTp) } @@ -746,7 +785,7 @@ defined class Foo */ val nextBinderTp = glb(List(patBinder.info.widen, pt)) val cond = typeAndEqualityTest(patBinder, pt) - val res = pmgen._asInstanceOf(patBinder, nextBinderTp) + val res = codegen._asInstanceOf(patBinder, nextBinderTp) override def toString = "TET"+(patBinder, pt) } @@ -756,7 +795,7 @@ defined class Foo */ // NOTE: generate `patTree == patBinder`, since the extractor must be in control of the equals method (also, patBinder may be 
null) // equals need not be well-behaved, so don't intersect with pattern's (stabilized) type (unlike MaybeBoundTyped's accumType, where it's required) - val cond = pmgen._equals(patTree, prevBinder) + val cond = codegen._equals(patTree, prevBinder) val res = CODE.REF(prevBinder) override def toString = "ET"+(prevBinder, patTree) } @@ -791,7 +830,7 @@ defined class Foo */ if (canDuplicate) { altss map {altTreeMakers => combineExtractors(altTreeMakers :+ TrivialTreeMaker(substitution(next).duplicate), pt) - } reduceLeft pmgen.typedOrElse(pt) + } reduceLeft codegen.typedOrElse(pt) } else { val rest = freshSym(pos, functionType(List(), inMatchMonad(pt)), "rest") // rest.info.member(nme.apply).withAnnotation(AnnotationInfo(ScalaInlineClass.tpe, Nil, Nil)) @@ -803,7 +842,7 @@ defined class Foo */ ) BLOCK( VAL(rest) === Function(Nil, substitution(next)), - combinedAlts reduceLeft pmgen.typedOrElse(pt) + combinedAlts reduceLeft codegen.typedOrElse(pt) ) } ) @@ -812,7 +851,7 @@ defined class Foo */ case class GuardTreeMaker(guardTree: Tree) extends TreeMaker { val localSubstitution: Substitution = EmptySubstitution - def chainBefore(next: Tree, pt: Type): Tree = pmgen.flatMapGuard(substitution(guardTree), next) + def chainBefore(next: Tree, pt: Type): Tree = codegen.flatMapGuard(substitution(guardTree), next) override def toString = "G("+ guardTree +")" } @@ -1066,12 +1105,12 @@ defined class Foo */ lazy val storedCond = freshSym(pos, BooleanClass.tpe, "rc") setFlag MUTABLE lazy val treesToHoist: List[Tree] = { nextBinder setFlag MUTABLE - List(storedCond, nextBinder) map { b => VAL(b) === pmgen.mkZero(b.info) } + List(storedCond, nextBinder) map { b => VAL(b) === codegen.mkZero(b.info) } } // TODO: finer-grained duplication def chainBefore(next: Tree, pt: Type): Tree = - atPos(pos)(pmgen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) + atPos(pos)(codegenOpt.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) } case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ @@ -1093,7 +1132,7 @@ defined class Foo */ IF (cond) THEN BLOCK( substitution(next).duplicate // TODO: finer-grained duplication -- MUST duplicate though, or we'll get VerifyErrors since sharing trees confuses lambdalift, and its confusion it emits illegal casts (diagnosed by Grzegorz: checkcast T ; invokevirtual S.m, where T not a subtype of S) - ) ELSE pmgen.zero + ) ELSE codegen.zero } } @@ -1242,7 +1281,7 @@ defined class Foo */ else casesUnOpt val combinedCases = - cases.map(combineExtractors(_, pt)).reduceLeft(pmgen.typedOrElse(optPt)) + cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt)) toHoist = ( for (treeMakers <- cases) @@ -1250,9 +1289,9 @@ defined class Foo */ ).flatten.flatten.toList (combinedCases, hasDefault) - } else (pmgen.zero, false) + } else (codegen.zero, false) - val expr = pmgen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault) + val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault) if (toHoist isEmpty) expr else Block(toHoist, expr) } @@ -1333,30 +1372,42 @@ defined class Foo */ } - def matchingMonadType: Type def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x"): Symbol def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree def typeTest(binder: Symbol, pt: Type): Tree // 
codegen relevant to the structure of the translation (how extractors are combined) - trait AbsCodeGen { import CODE.UNIT + trait AbsCodeGen { def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree + def zero: Tree def flatMap(prev: Tree, b: Symbol, next: Tree): Tree + def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree - def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree def flatMapGuard(cond: Tree, next: Tree): Tree + def fun(arg: Symbol, body: Tree): Tree - def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree - def zero: Tree - def one(res: Tree, bodyPt: Type, matchPt: Type): Tree def condOptimized(c: Tree, then: Tree): Tree def _equals(checker: Tree, binder: Symbol): Tree def _asInstanceOf(b: Symbol, tp: Type): Tree def mkZero(tp: Type): Tree + + def tupleSel(binder: Symbol)(i: Int): Tree + def index(tgt: Tree)(i: Int): Tree + def drop(tgt: Tree)(n: Int): Tree + def and(a: Tree, b: Tree): Tree + def _isInstanceOf(b: Symbol, tp: Type): Tree + } + + trait AbsOptimizedCodeGen extends AbsCodeGen { + def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree } - def pmgen: AbsCodeGen + def codegen: AbsCodeGen + def codegenOpt: AbsOptimizedCodeGen = codegen.asInstanceOf[AbsOptimizedCodeGen] + def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator } @@ -1365,40 +1416,38 @@ defined class Foo */ /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// trait MatchCodeGen extends TreeMakers { - lazy val pmgen: CommonCodeGen with MatchingStrategyGen with MonadInstGen = - if (optimizingCodeGen) (new CommonCodeGen with OptimizedCodeGen {}) - else (new CommonCodeGen with MatchingStrategyGen with MonadInstGen {}) + lazy val codegen: AbsCodeGen = if (optimizingCodeGen) new OptimizedCodeGen else new NaiveCodeGen import CODE._ - trait MatchingStrategyGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => - // methods in MatchingStrategy (the monad companion) -- used directly in translation + class NaiveCodeGen extends CommonCodeGen { + //// methods in MatchingStrategy (the monad companion) -- used directly in translation + // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`) def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree - = genTypeApply(matchingStrategy DOT vpmName.runOrElse, scrutSym.info, resTp) APPLY (scrut) APPLY (fun(scrutSym, matcher)) // matchingStrategy.runOrElse(scrut)(matcher) - // *only* used to wrap the RHS of a body (isDefinedAt synthesis relies on this) - def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (matchingStrategy DOT vpmName.one) (_asInstanceOf(res, bodyPt, force = true)) // matchingStrategy.one(res), like one, but blow this one away for isDefinedAt (since it's the RHS of a case) - def zero: Tree = matchingStrategy DOT vpmName.zero // matchingStrategy.zero - def guard(c: Tree, then: Tree, tp: Type): Tree = genTypeApply((matchingStrategy DOT vpmName.guard), repackExistential(tp)) APPLY (c, then) // matchingStrategy.guard[tp](c, then) - } - - trait MonadInstGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => - // methods in the monad instance -- used 
directly in translation - def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next)) - def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (genTypeApply(thisCase DOT vpmName.orElse, pt)) APPLY (elseCase) - - // TODO: the trees generated by flatMapCond and flatMapGuard may need to be distinguishable by exhaustivity checking -- they aren't right now - def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, - nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next) - def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, UnitClass.tpe), UnitClass.tpe, next) - def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = throw new UnsupportedOperationException("Can't optimize under user-defined monad.") + = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher)) + // __match.one(`res`) + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res) + // __match.zero + def zero: Tree = __match(vpmName.zero) + // __match.guard(`c`, `then`) + def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then) + + //// methods in the monad instance -- used directly in translation + // `prev`.flatMap(`b` => `next`) + def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next)) + // `thisCase`.orElse(`elseCase`) + def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase) + // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`) + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next) + // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`) + def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next) } // when we know we're targetting Option, do some inlining the optimizer won't do - // `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard - // this is a special instance of the advanced inlining optimization that takes a method call on - // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases - // this trait overrides ALL of the methods of MatchingStrategyGen with MonadInstGen - trait OptimizedCodeGen extends CommonCodeGen with MatchingStrategyGen with MonadInstGen { + // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard + // this is a special instance of the advanced inlining optimization that takes a method call on + // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases + class OptimizedCodeGen extends CommonCodeGen with AbsOptimizedCodeGen { lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero") /** Inline runOrElse and get rid of Option allocations @@ -1410,7 +1459,7 @@ defined class Foo */ @inline private def dontStore(tp: Type) = (tp.typeSymbol eq UnitClass) || (tp.typeSymbol eq NothingClass) lazy val keepGoing = freshSym(NoPosition, BooleanClass.tpe, "keepGoing") setFlag MUTABLE lazy val matchRes = freshSym(NoPosition, AnyClass.tpe, "matchRes") setFlag MUTABLE - 
override def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = { + def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean) = { matchRes.info = if (resTp ne NoType) resTp.widen else AnyClass.tpe // we don't always know resTp, and it might be AnyVal, in which case we can't assign NULL if (dontStore(resTp)) matchRes resetFlag MUTABLE // don't assign to Unit-typed var's, in fact, make it a val -- conveniently also works around SI-5245 BLOCK( @@ -1425,7 +1474,7 @@ defined class Foo */ } // only used to wrap the RHS of a body - override def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = { + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = { BLOCK( if (dontStore(matchPt)) res // runOrElse hasn't been called yet, so matchRes.isMutable is irrelevant, also, tp may be a subtype of resTp used in runOrElse... else (REF(matchRes) === res), // _asInstanceOf(res, tp.widen, force = true) @@ -1434,12 +1483,9 @@ defined class Foo */ ) } - override def zero: Tree = REF(zeroSym) - - // guard is only used by flatMapCond and flatMapGuard, which are overridden - override def guard(c: Tree, then: Tree, tp: Type): Tree = throw new NotImplementedError("guard is never called by optimizing codegen") + def zero: Tree = REF(zeroSym) - override def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = { + def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = { val tp = inMatchMonad(b.tpe) val prevSym = freshSym(prev.pos, tp, "o") val isEmpty = tp member vpmName.isEmpty @@ -1451,27 +1497,27 @@ defined class Foo */ ) } - override def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = { + def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = { BLOCK( thisCase, IF (REF(keepGoing)) THEN elseCase ELSE zero // leave trailing zero for now, otherwise typer adds () anyway ) } - override def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = IF (cond) THEN BLOCK( VAL(nextBinder) === res, next ) ELSE zero - override def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = + def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree = IF (cond) THEN BLOCK( condSym === TRUE, nextBinder === res, next ) ELSE zero - override def flatMapGuard(guardTree: Tree, next: Tree): Tree = + def flatMapGuard(guardTree: Tree, next: Tree): Tree = IF (guardTree) THEN next ELSE zero } @@ -1514,25 +1560,10 @@ defined class Foo */ case _ => tp } - object vpmName { - val one = newTermName("one") - val drop = newTermName("drop") - val flatMap = newTermName("flatMap") - val get = newTermName("get") - val guard = newTermName("guard") - val isEmpty = newTermName("isEmpty") - val orElse = newTermName("orElse") - val outer = newTermName("") - val runOrElse = newTermName("runOrElse") - val zero = newTermName("zero") - - def counted(str: String, i: Int) = newTermName(str+i) - } - def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt)) - trait CommonCodeGen extends AbsCodeGen { self: CommonCodeGen with MatchingStrategyGen with MonadInstGen => + abstract class CommonCodeGen extends AbsCodeGen { def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) def tupleSel(binder: 
Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder @@ -1575,56 +1606,5 @@ defined class Foo */ } } } - - def matchingStrategy: Tree } } - -// object noShadowedUntyped extends Traverser { -// override def traverse(t: Tree) { -// if ((t.tpe ne null) && (t.tpe ne NoType)) okTree = t -// else if(okTree ne null) println("untyped subtree "+ t +" in typed tree"+ okTree +" : "+ okTree.tpe) -// super.traverse(t) -// } -// var okTree: Tree = null -// } -// private def c(t: Tree): Tree = noShadowedUntyped(t) - - // def approximateTreeMaker(tm: TreeMaker): List[Test] = tm match { - // case ExtractorTreeMaker(extractor, _, _) => HavocTest - // case FilteredExtractorTreeMaker(extractor, lenGuard, _, _) => HavocTest - // case ProductExtractorTreeMaker(testedBinder, lenGuard, _) => TopTest // TODO: (testedBinder ne null) and lenGuard - // - // // cond = typeTest(prevBinder, nextBinderTp) - // // res = pmgen._asInstanceOf(prevBinder, nextBinderTp) - // case TypeTestTreeMaker(testedBinder, pt, _) => - // - // // cond = typeAndEqualityTest(patBinder, pt) - // // res = pmgen._asInstanceOf(patBinder, nextBinderTp) - // case TypeAndEqualityTestTreeMaker(_, testedBinder, pt, _) => - // - // // cond = pmgen._equals(patTree, prevBinder) - // // res = CODE.REF(prevBinder) - // case EqualityTestTreeMaker(testedBinder, rhs, _) => - // - // case AlternativesTreeMaker(_, alts: *) => - // - // case GuardTreeMaker(guardTree) => - // } - - // // TODO: it's not exactly sound to represent an unapply-call by its symbol... also need to consider the prefix, like the outer-test (can this be captured as the path to this test?) - // type ExtractorRepr = Symbol - // - // // TODO: we're undoing tree-construction that we ourselves performed earlier -- how about not-doing so we don't have to undo? 
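To make the inlining performed by OptimizedCodeGen above concrete: when the match monad is known to be Option, the generator emits the expanded conditionals directly instead of calling flatMap/orElse. A rough source-level sketch of the rewrites (assuming Option; the compiler emits trees, and the helper names below are invented):

    // what `o flatMap f` is expanded into, conceptually
    def inlinedFlatMap[A, B](o: Option[A])(f: A => Option[B]): Option[B] =
      if (o.isEmpty) None else f(o.get)
    // what `a orElse b` is expanded into, conceptually
    def inlinedOrElse[A](a: Option[A])(b: => Option[A]): Option[A] =
      if (a.isEmpty) b else a
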
- // private def findBinderArgOfApply(extractor: Tree, unappSym: Symbol): Symbol = { - // class CollectTreeTraverser[T](pf: PartialFunction[Tree => T]) extends Traverser { - // val hits = new ListBuffer[T] - // override def traverse(t: Tree) { - // if (pf.isDefinedAt(t)) hits += pf(t) - // super.traverse(t) - // } - // } - // val trav = new CollectTreeTraverser{ case Apply(unapp, List(arg)) if unapp.symbol eq unappSym => arg.symbol} - // trav.traverse(extractor) - // trav.hits.headOption getOrElse NoSymbol - // } diff --git a/src/library/scala/MatchingStrategy.scala b/src/library/scala/MatchingStrategy.scala deleted file mode 100644 index d11598bad6..0000000000 --- a/src/library/scala/MatchingStrategy.scala +++ /dev/null @@ -1,27 +0,0 @@ -package scala - -abstract class MatchingStrategy[M[+x]] { - // runs the matcher on the given input - def runOrElse[T, U](in: T)(matcher: T => M[U]): U - - def zero: M[Nothing] - def one[T](x: T): M[T] - def guard[T](cond: Boolean, then: => T): M[T] - def isSuccess[T, U](x: T)(f: T => M[U]): Boolean // used for isDefinedAt - - def caseResult[T](x: T): M[T] = one(x) // used as a marker to distinguish the RHS of a case (case pat => RHS) and intermediate successes - // when deriving a partial function from a pattern match, - // we need to distinguish the RHS of a case, which should not be evaluated when computing isDefinedAt, - // from an intermediate result (which must be computed) -} - -object MatchingStrategy { - implicit object OptionMatchingStrategy extends MatchingStrategy[Option] { - type M[+x] = Option[x] - @inline def runOrElse[T, U](x: T)(f: T => M[U]): U = f(x) getOrElse (throw new MatchError(x)) - @inline def zero: M[Nothing] = None - @inline def one[T](x: T): M[T] = Some(x) - @inline def guard[T](cond: Boolean, then: => T): M[T] = if(cond) Some(then) else None - @inline def isSuccess[T, U](x: T)(f: T => M[U]): Boolean = !f(x).isEmpty - } -} \ No newline at end of file diff --git a/test/files/run/virtpatmat_staging.check b/test/files/run/virtpatmat_staging.check new file mode 100644 index 0000000000..106ae40b99 --- /dev/null +++ b/test/files/run/virtpatmat_staging.check @@ -0,0 +1 @@ +runOrElse(7, ?guard(false,?).flatMap(? 
=>one(foo)).orElse(one(bar))) diff --git a/test/files/run/virtpatmat_staging.flags b/test/files/run/virtpatmat_staging.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/run/virtpatmat_staging.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/run/virtpatmat_staging.scala b/test/files/run/virtpatmat_staging.scala new file mode 100644 index 0000000000..c17b45043b --- /dev/null +++ b/test/files/run/virtpatmat_staging.scala @@ -0,0 +1,52 @@ +trait Intf { + type Rep[+T] + type M[+T] = Rep[Maybe[T]] + + val __match: Matcher + abstract class Matcher { + // runs the matcher on the given input + def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U] + + def zero: M[Nothing] + def one[T](x: Rep[T]): M[T] + def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T] + def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] // used for isDefinedAt + } + + abstract class Maybe[+A] { + def flatMap[B](f: Rep[A] => M[B]): M[B] + def orElse[B >: A](alternative: => M[B]): M[B] + } + + implicit def proxyMaybe[A](m: M[A]): Maybe[A] + implicit def repInt(x: Int): Rep[Int] + implicit def repBoolean(x: Boolean): Rep[Boolean] + implicit def repString(x: String): Rep[String] + + def test = 7 match { case 5 => "foo" case _ => "bar" } +} + +trait Impl extends Intf { + type Rep[+T] = String + + object __match extends Matcher { + def runOrElse[T, U](in: Rep[T])(matcher: Rep[T] => M[U]): Rep[U] = ("runOrElse("+ in +", ?" + matcher("?") + ")") + def zero: M[Nothing] = "zero" + def one[T](x: Rep[T]): M[T] = "one("+x.toString+")" + def guard[T](cond: Rep[Boolean], then: => Rep[T]): M[T] = "guard("+cond+","+then+")" + def isSuccess[T, U](x: Rep[T])(f: Rep[T] => M[U]): Rep[Boolean] = ("isSuccess("+x+", ?" + f("?") + ")") + } + + implicit def proxyMaybe[A](m: M[A]): Maybe[A] = new Maybe[A] { + def flatMap[B](f: Rep[A] => M[B]): M[B] = m + ".flatMap(? =>"+ f("?") +")" + def orElse[B >: A](alternative: => M[B]): M[B] = m + ".orElse("+ alternative +")" + } + + def repInt(x: Int): Rep[Int] = x.toString + def repBoolean(x: Boolean): Rep[Boolean] = x.toString + def repString(x: String): Rep[String] = x +} + +object Test extends Impl with Intf with App { + println(test) +} -- cgit v1.2.3 From c58b240177bf6b1017b5fdb6cbfb7be49b4ee3f1 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sun, 8 Jan 2012 15:39:58 +0100 Subject: [vpm] factored out optimizing codegen --- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 1070 ++++++++++---------- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- 2 files changed, 550 insertions(+), 522 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index aef85206fa..6d31243fd0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -7,8 +7,7 @@ package scala.tools.nsc package typechecker import symtab._ -import Flags.{ CASE => _, _ } - +import Flags.{MUTABLE, METHOD, LABEL, SYNTHETIC} /** Translate pattern matching into method calls (these methods form a zero-plus monad), similar in spirit to how for-comprehensions are compiled. 
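To see the analogy with for-comprehension desugaring, a simple match such as `7 match { case 5 => "foo" case _ => "bar" }` is translated by the pure, `__match`-based code generator into calls of roughly this shape (a sketch; the binder names are schematic):

    __match.runOrElse(7) { x =>
      __match.guard(5 == x, x)
        .flatMap(x1 => __match.one("foo"))
        .orElse(__match.one("bar"))
    }

The virtpatmat_staging.check output above records exactly this shape, evaluated with a strategy whose operations just build strings.
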
* @@ -33,12 +32,90 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => import global._ import definitions._ - class MatchTranslator(val typer: Typer) extends MatchCodeGen { - def typed(tree: Tree, mode: Int, pt: Type): Tree = typer.typed(tree, mode, pt) // for MatchCodeGen -- imports don't provide implementations for abstract members + object vpmName { + val one = newTermName("one") + val drop = newTermName("drop") + val flatMap = newTermName("flatMap") + val get = newTermName("get") + val guard = newTermName("guard") + val isEmpty = newTermName("isEmpty") + val orElse = newTermName("orElse") + val outer = newTermName("") + val runOrElse = newTermName("runOrElse") + val zero = newTermName("zero") + val __match = newTermName("__match") + + def counted(str: String, i: Int) = newTermName(str+i) + } + + object MatchTranslator { + def apply(typer: Typer): MatchTranslation = { + import typer._ + // typing `__match` to decide which MatchTranslator to create adds 4% to quick.comp.timer + newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { + case SilentResultValue(ms) => new PureMatchTranslator(typer, ms) + case _ => new OptimizingMatchTranslator(typer) + } + } + } + + class PureMatchTranslator(val typer: Typer, val matchStrategy: Tree) extends MatchTranslation with TreeMakers with PureCodegen + class OptimizingMatchTranslator(val typer: Typer) extends MatchTranslation with TreeMakers with MatchOptimizations + +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// talking to userland +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + /** Interface with user-defined match monad? 
+ * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below: + + type Matcher[P[_], M[+_], A] = { + def flatMap[B](f: P[A] => M[B]): M[B] + def orElse[B >: A](alternative: => M[B]): M[B] + } + + abstract class MatchStrategy[P[_], M[+_]] { + // runs the matcher on the given input + def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U] + + def zero: M[Nothing] + def one[T](x: P[T]): M[T] + def guard[T](cond: P[Boolean], then: => P[T]): M[T] + def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt + } + + * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) + - import typer._ - import typeDebug.{ ptTree, ptBlock, ptLine } + * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly) + + object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] { + def zero = None + def one[T](x: T) = Some(x) + // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted + def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None + def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) + def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty + } + + */ + trait MatchMonadInterface { + val typer: Typer + val matchOwner = typer.context.owner + + def inMatchMonad(tp: Type): Type + def pureType(tp: Type): Type + final def matchMonadResult(tp: Type): Type = + tp.baseType(matchMonadSym).typeArgs match { + case arg :: Nil => arg + case _ => ErrorType + } + + protected def matchMonadSym: Symbol + } + trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore => + import typer.{typed, context, silent, reallyExists} /** Implement a pattern match by turning its cases (including the implicit failure case) * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. @@ -56,12 +133,17 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // and the only place that emits Matches after typers is for exception handling anyway) assert(phase.id <= currentRun.typerPhase.id, phase) + def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match { + case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args) + case _ => tp + } + val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen)) val scrutSym = freshSym(scrut.pos, pureType(scrutType)) val okPt = repeatedToSeq(pt) // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental - combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, context.owner) + combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner) } @@ -497,81 +579,6 @@ class Foo(x: Other) { x._1 } // no error in this order override def toString() = extractorCall +": "+ extractorCall.tpe +" (symbol= "+ extractorCall.symbol +")." 
} - // tack an outer test onto `cond` if binder.info and expectedType warrant it - def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._ - if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass) - && needsOuterTest(expectedTp, binder.info, context.owner)) { - val expectedPrefix = expectedTp.prefix match { - case ThisType(clazz) => THIS(clazz) - case pre => REF(pre.prefix, pre.termSymbol) - } - - // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` - // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor? - val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC - val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix - - // first check cond, since that should ensure we're not selecting outer on null - codegen.and(cond, outerCheck) - } - else - cond - } - - // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest) - def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt) - def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(codegen._isInstanceOf(binder, pt)) - - /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms: - - A reference to a class C, p.C, or T#C. - This type pattern matches any non-null instance of the given class. - Note that the prefix of the class, if it is given, is relevant for determining class instances. - For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix. - The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case. - - - A singleton type p.type. - This type pattern matches only the value denoted by the path p - (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now - // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time" - - - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern. - This type pattern matches all values that are matched by each of the type patterns Ti. - - - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _. - This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards. - The bounds or alias type of these type variable are determined as described in (§8.3). - - - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO - This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1. 
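For orientation, the run-time checks that these type-pattern forms give rise to look roughly as follows for a scrutinee binder `x` (a source-level sketch; the actual implementation builds trees and may wrap the type test in an outer-prefix comparison via maybeWithOuterCheck):

    def sketchedChecks(x: AnyRef): List[Boolean] = List(
      x.isInstanceOf[String],              // case _: String -- plain type test
      "a" == x,                            // case "a"       -- constant: equals, pattern as receiver
      Nil == x && x.isInstanceOf[List[_]], // case Nil       -- stable identifier: equals plus instance test
      x eq null                            // case null      -- reference comparison against null
    )
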
- **/ - - // generate the tree for the run-time test that follows from the fact that - // a `scrut` of known type `scrutTp` is expected to have type `expectedTp` - // uses maybeWithOuterCheck to check the type's prefix - def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._ - // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null` - // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false") - def genEqualsAndInstanceOf(sym: Symbol): Tree - = codegen._equals(REF(sym), patBinder) AND codegen._isInstanceOf(patBinder, pt.widen) - - def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe - - val patBinderTp = patBinder.info.widen - def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt) - - // TODO: [SPEC] type test for Array - // TODO: use manifests to improve tests (for erased types we can do better when we have a manifest) - pt match { - case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here - case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. List() == Nil - case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym) - case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL - case ConstantType(const) => codegen._equals(Literal(const), patBinder) - case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL) - case _ => typeTest(patBinder, pt) - } - } - /** A conservative approximation of which patterns do not discern anything. * They are discarded during the translation. */ @@ -597,94 +604,70 @@ class Foo(x: Other) { x._1 } // no error in this order } /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// the making of the trees +// substitution /////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - /** Interface with user-defined match monad? 
- * if there's a `__match` in scope, we use this as the match strategy, assuming it conforms to MatchStrategy as defined below: - - type Matcher[P[_], M[+_], A] = { - def flatMap[B](f: P[A] => M[B]): M[B] - def orElse[B >: A](alternative: => M[B]): M[B] - } - - abstract class MatchStrategy[P[_], M[+_]] { - // runs the matcher on the given input - def runOrElse[T, U](in: P[T])(matcher: P[T] => M[U]): P[U] - - def zero: M[Nothing] - def one[T](x: P[T]): M[T] - def guard[T](cond: P[Boolean], then: => P[T]): M[T] - def isSuccess[T, U](x: P[T])(f: P[T] => M[U]): P[Boolean] // used for isDefinedAt - } - - * P and M are derived from one's signature (`def one[T](x: P[T]): M[T]`) - - - * if no `__match` is found, we assume the following implementation (and generate optimized code accordingly) - - object __match extends MatchStrategy[({type Id[x] = x})#Id, Option] { - def zero = None - def one[T](x: T) = Some(x) - // NOTE: guard's return type must be of the shape M[T], where M is the monad in which the pattern match should be interpreted - def guard[T](cond: Boolean, then: => T): Option[T] = if(cond) Some(then) else None - def runOrElse[T, U](x: T)(f: T => Option[U]): U = f(x) getOrElse (throw new MatchError(x)) - def isSuccess[T, U](x: T)(f: T => Option[U]): Boolean = !f(x).isEmpty - } - - */ - trait MatchMonadInterface { import CODE._ - val typer: Typer - import typer._ - - object vpmName { - val one = newTermName("one") - val drop = newTermName("drop") - val flatMap = newTermName("flatMap") - val get = newTermName("get") - val guard = newTermName("guard") - val isEmpty = newTermName("isEmpty") - val orElse = newTermName("orElse") - val outer = newTermName("") - val runOrElse = newTermName("runOrElse") - val zero = newTermName("zero") - val __match = newTermName("__match") - - def counted(str: String, i: Int) = newTermName(str+i) + trait TypedSubstitution extends MatchMonadInterface { + object Substitution { + def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to)) + // requires sameLength(from, to) + def apply(from: List[Symbol], to: List[Tree]) = + if (from nonEmpty) new Substitution(from, to) else EmptySubstitution } - final lazy val matchStrategy = // typing `__match` instead of just returning EmptyTree adds 4% to quick.comp.timer - newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName.__match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { - case SilentResultValue(ms) => ms - case _ => EmptyTree + class Substitution(val from: List[Symbol], val to: List[Tree]) { + // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, + // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. + def apply(tree: Tree): Tree = { + // according to -Ystatistics 10% of translateMatch's time is spent in this method... 
+ // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst + if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree + else (new Transformer { + @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = + if (origTp == null || origTp == NoType) to + // important: only type when actually substing and when original tree was typed + // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors) + else typer.typed(to, EXPRmode, WildcardType) + + override def transform(tree: Tree): Tree = { + def subst(from: List[Symbol], to: List[Tree]): Tree = + if (from.isEmpty) tree + else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe) + else subst(from.tail, to.tail) + + tree match { + case Ident(_) => subst(from, to) + case _ => super.transform(tree) + } + } + }).transform(tree) } - final def optimizingCodeGen: Boolean = matchStrategy eq EmptyTree - - def __match(n: Name): SelectStart = matchStrategy DOT n - - private lazy val oneSig: Type = - typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message - - final def inMatchMonad(tp: Type): Type = - if(optimizingCodeGen) optionType(tp) - else appliedType(oneSig, List(tp)).finalResultType - private lazy val matchMonadSym = - if(optimizingCodeGen) OptionClass - else oneSig.finalResultType.typeSymbol - - final def matchMonadResult(tp: Type): Type = - tp.baseType(matchMonadSym).typeArgs match { - case arg :: Nil => arg - case _ => ErrorType + // the substitution that chains `other` before `this` substitution + // forall t: Tree. this(other(t)) == (this >> other)(t) + def >>(other: Substitution): Substitution = { + val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) } + new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly } + override def toString = (from zip to) mkString("Substitution(", ", ", ")") + } - final def pureType(tp: Type): Type = - if(optimizingCodeGen) tp - else appliedType(oneSig, List(tp)).paramTypes.head + object EmptySubstitution extends Substitution(Nil, Nil) { + override def apply(tree: Tree): Tree = tree + override def >>(other: Substitution): Substitution = other + } } - trait TreeMakers extends MatchMonadInterface { +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// the making of the trees +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + trait TreeMakers extends TypedSubstitution { self: CodegenCore => + def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = + (cases, Nil) + + def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = + None + abstract class TreeMaker { /** captures the scope and the value of the bindings in patterns * important *when* the substitution happens (can't accumulate and do at once after the full matcher has been constructed) @@ -751,7 +734,7 @@ class Foo(x: Other) { x._1 } // no error in this order */ case class ExtractorTreeMaker(extractor: Tree, extraCond: 
Option[Tree], nextBinder: Symbol, localSubstitution: Substitution)(extractorReturnsBoolean: Boolean) extends FunTreeMaker { def chainBefore(next: Tree, pt: Type): Tree = { - val condAndNext = extraCond map (codegen.condOptimized(_, next)) getOrElse next + val condAndNext = extraCond map (codegen.ifThenElseZero(_, next)) getOrElse next atPos(extractor.pos)( if (extractorReturnsBoolean) codegen.flatMapCond(extractor, CODE.UNIT, nextBinder, nextBinder.info.widen, substitution(condAndNext)) else codegen.flatMap(extractor, nextBinder, substitution(condAndNext)) @@ -766,12 +749,36 @@ class Foo(x: Other) { x._1 } // no error in this order def chainBefore(next: Tree, pt: Type): Tree = { val nullCheck = REF(prevBinder) OBJ_NE NULL val cond = extraCond map (nullCheck AND _) getOrElse nullCheck - codegen.condOptimized(cond, substitution(next)) + codegen.ifThenElseZero(cond, substitution(next)) } override def toString = "P"+(prevBinder, extraCond getOrElse "", localSubstitution) } + // tack an outer test onto `cond` if binder.info and expectedType warrant it + def maybeWithOuterCheck(binder: Symbol, expectedTp: Type)(cond: Tree): Tree = { import CODE._ + if ( !((expectedTp.prefix eq NoPrefix) || expectedTp.prefix.typeSymbol.isPackageClass) + && needsOuterTest(expectedTp, binder.info, matchOwner)) { + val expectedPrefix = expectedTp.prefix match { + case ThisType(clazz) => THIS(clazz) + case pre => REF(pre.prefix, pre.termSymbol) + } + + // ExplicitOuter replaces `Select(q, outerSym) OBJ_EQ expectedPrefix` by `Select(q, outerAccessor(outerSym.owner)) OBJ_EQ expectedPrefix` + // if there's an outer accessor, otherwise the condition becomes `true` -- TODO: can we improve needsOuterTest so there's always an outerAccessor? + val outer = expectedTp.typeSymbol.newMethod(vpmName.outer) setInfo expectedTp.prefix setFlag SYNTHETIC + val outerCheck = (Select(codegen._asInstanceOf(binder, expectedTp), outer)) OBJ_EQ expectedPrefix + + // first check cond, since that should ensure we're not selecting outer on null + codegen.and(cond, outerCheck) + } + else + cond + } + + // TODO: also need to test when erasing pt loses crucial information (and if we can recover it using a manifest) + def needsTypeTest(tp: Type, pt: Type) = !(tp <:< pt) + private def typeTest(binder: Symbol, pt: Type) = maybeWithOuterCheck(binder, pt)(codegen._isInstanceOf(binder, pt)) // need to substitute since binder may be used outside of the next extractor call (say, in the body of the case) case class TypeTestTreeMaker(prevBinder: Symbol, nextBinderTp: Type, pos: Position) extends CondTreeMaker { @@ -784,6 +791,56 @@ class Foo(x: Other) { x._1 } // no error in this order case class TypeAndEqualityTestTreeMaker(prevBinder: Symbol, patBinder: Symbol, pt: Type, pos: Position) extends CondTreeMaker { val nextBinderTp = glb(List(patBinder.info.widen, pt)) + /** Type patterns consist of types, type variables, and wildcards. A type pattern T is of one of the following forms: + - A reference to a class C, p.C, or T#C. + This type pattern matches any non-null instance of the given class. + Note that the prefix of the class, if it is given, is relevant for determining class instances. + For instance, the pattern p.C matches only instances of classes C which were created with the path p as prefix. + The bottom types scala.Nothing and scala.Null cannot be used as type patterns, because they would match nothing in any case. + + - A singleton type p.type. 
+ This type pattern matches only the value denoted by the path p + (that is, a pattern match involved a comparison of the matched value with p using method eq in class AnyRef). // TODO: the actual pattern matcher uses ==, so that's what I'm using for now + // https://issues.scala-lang.org/browse/SI-4577 "pattern matcher, still disappointing us at equality time" + + - A compound type pattern T1 with ... with Tn where each Ti is a type pat- tern. + This type pattern matches all values that are matched by each of the type patterns Ti. + + - A parameterized type pattern T[a1,...,an], where the ai are type variable patterns or wildcards _. + This type pattern matches all values which match T for some arbitrary instantiation of the type variables and wildcards. + The bounds or alias type of these type variable are determined as described in (§8.3). + + - A parameterized type pattern scala.Array[T1], where T1 is a type pattern. // TODO + This type pattern matches any non-null instance of type scala.Array[U1], where U1 is a type matched by T1. + **/ + + // generate the tree for the run-time test that follows from the fact that + // a `scrut` of known type `scrutTp` is expected to have type `expectedTp` + // uses maybeWithOuterCheck to check the type's prefix + private def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree = { import CODE._ + // TODO: `null match { x : T }` will yield a check that (indirectly) tests whether `null ne null` + // don't bother (so that we don't end up with the warning "comparing values of types Null and Null using `ne' will always yield false") + def genEqualsAndInstanceOf(sym: Symbol): Tree + = codegen._equals(REF(sym), patBinder) AND codegen._isInstanceOf(patBinder, pt.widen) + + def isRefTp(tp: Type) = tp <:< AnyRefClass.tpe + + val patBinderTp = patBinder.info.widen + def isMatchUnlessNull = isRefTp(pt) && !needsTypeTest(patBinderTp, pt) + + // TODO: [SPEC] type test for Array + // TODO: use manifests to improve tests (for erased types we can do better when we have a manifest) + pt match { + case SingleType(_, sym) /*this implies sym.isStable*/ => genEqualsAndInstanceOf(sym) // TODO: [SPEC] the spec requires `eq` instead of `==` here + case ThisType(sym) if sym.isModule => genEqualsAndInstanceOf(sym) // must use == to support e.g. 
List() == Nil + case ThisType(sym) => REF(patBinder) OBJ_EQ This(sym) + case ConstantType(Constant(null)) if isRefTp(patBinderTp) => REF(patBinder) OBJ_EQ NULL + case ConstantType(const) => codegen._equals(Literal(const), patBinder) + case _ if isMatchUnlessNull => maybeWithOuterCheck(patBinder, pt)(REF(patBinder) OBJ_NE NULL) + case _ => typeTest(patBinder, pt) + } + } + val cond = typeAndEqualityTest(patBinder, pt) val res = codegen._asInstanceOf(patBinder, nextBinderTp) override def toString = "TET"+(patBinder, pt) @@ -855,67 +912,292 @@ class Foo(x: Other) { x._1 } // no error in this order override def toString = "G("+ guardTree +")" } -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// decisions, decisions -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - object Test { - var currId = 0 - } - case class Test(cond: Cond, treeMaker: TreeMaker) { - // def <:<(other: Test) = cond <:< other.cond - // def andThen_: (prev: List[Test]): List[Test] = - // prev.filterNot(this <:< _) :+ this + def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) - private val reusedBy = new collection.mutable.HashSet[Test] - var reuses: Option[Test] = None - def registerReuseBy(later: Test): Unit = { - assert(later.reuses.isEmpty, later.reuses) - reusedBy += later - later.reuses = Some(this) + // a foldLeft to accumulate the localSubstitution left-to-right + // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution + def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { + var accumSubst: Substitution = initial + treeMakers foreach { maker => + maker incorporateOuterSubstitution accumSubst + accumSubst = maker.substitution } - - val id = { Test.currId += 1; Test.currId} - override def toString = - if (cond eq Top) "T" - else if(cond eq Havoc) "!?" - else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. 
T"+reuses.get.id)) - } - - object Cond { - // def refines(self: Cond, other: Cond): Boolean = (self, other) match { - // case (Bottom, _) => true - // case (Havoc , _) => true - // case (_ , Top) => true - // case (_ , _) => false - // } - var currId = 0 + removeSubstOnly(treeMakers) } - abstract class Cond { - // def testedPath: Tree - // def <:<(other: Cond) = Cond.refines(this, other) + // calls propagateSubstitution on the treemakers + def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){ + val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them - val id = { Cond.currId += 1; Cond.currId} - } + emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{ + val (matcher, hasDefault, toHoist) = + if (casesUnOpt nonEmpty) { + // when specified, need to propagate pt explicitly (type inferencer can't handle it) + val optPt = + if (isFullyDefined(pt)) inMatchMonad(pt) + else NoType - // does not contribute any knowledge - case object Top extends Cond + // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one + // exhaustivity and reachability must be checked before optimization as well + val hasDefault = casesUnOpt.nonEmpty && { + val nonTrivLast = casesUnOpt.last + nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] + } - // takes away knowledge. e.g., a user-defined guard - case object Havoc extends Cond + val (cases, toHoist) = optimizeCases(scrutSym, casesUnOpt, pt) - // we know everything! everything! - // this either means the case is unreachable, - // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives - // case object Bottom extends Cond + val combinedCases = + cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt)) + (combinedCases, hasDefault, toHoist) + } else (codegen.zero, false, Nil) - object EqualityCond { - private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond] - def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs)) + val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault) + if (toHoist isEmpty) expr + else Block(toHoist, expr) + } } - class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond { + + // combineExtractors changes the current substitution's of the tree makers in `treeMakers` + // requires propagateSubstitution(treeMakers) has been called + def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree = + treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt)) + + // TODO: do this during tree construction, but that will require tracking the current owner in treemakers + // TODO: assign more fine-grained positions + // fixes symbol nesting, assigns positions + private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser { + currentOwner = origOwner + + override def traverse(t: Tree) { + if (t != EmptyTree && t.pos == NoPosition) { + t.setPos(pos) + } + t match { + case Function(_, _) if t.symbol == NoSymbol => + t.symbol = currentOwner.newAnonymousFunctionValue(t.pos) + // println("new symbol for "+ (t, t.symbol.ownerChain)) + case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) => + 
// println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain)) + t.symbol.owner = currentOwner + case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) + // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) + if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? + assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor) + d.symbol.lazyAccessor.owner = currentOwner + } + if(d.symbol.moduleClass ne NoSymbol) + d.symbol.moduleClass.owner = currentOwner + + d.symbol.owner = currentOwner + // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => + // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)) + case _ => + } + super.traverse(t) + } + + // override def apply + // println("before fixerupper: "+ xTree) + // currentRun.trackerFactory.snapshot() + // println("after fixerupper") + // currentRun.trackerFactory.snapshot() + } + } + + +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// generate actual trees +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + trait CodegenCore extends MatchMonadInterface { + private var ctr = 0 + def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1; + // assert(owner ne null) + // assert(owner ne NoSymbol) + NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp) + } + + // codegen relevant to the structure of the translation (how extractors are combined) + trait AbsCodegen { + def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree + def zero: Tree + def flatMap(prev: Tree, b: Symbol, next: Tree): Tree + def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree + + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree + def flatMapGuard(cond: Tree, next: Tree): Tree + + def fun(arg: Symbol, body: Tree): Tree + def ifThenElseZero(c: Tree, then: Tree): Tree + def _equals(checker: Tree, binder: Symbol): Tree + def _asInstanceOf(b: Symbol, tp: Type): Tree + def mkZero(tp: Type): Tree + + def tupleSel(binder: Symbol)(i: Int): Tree + def index(tgt: Tree)(i: Int): Tree + def drop(tgt: Tree)(n: Int): Tree + def and(a: Tree, b: Tree): Tree + def _isInstanceOf(b: Symbol, tp: Type): Tree + } + + def codegen: AbsCodegen + + def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt)) + + abstract class CommonCodegen extends AbsCodegen { import CODE._ + def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) + def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) + def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder + def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) + def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n)) + def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker 
must be the target of the ==, that's the patmat semantics for ya + def and(a: Tree, b: Tree): Tree = a AND b + def ifThenElseZero(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero + + // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) + def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp) + if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX) + else gen.mkAsInstanceOf(t, tpX, true, false) + } + + def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false) + // { val tpX = repackExistential(tp) + // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } + // else gen.mkIsInstanceOf(REF(b), tpX, true, false) + // } + + def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp) + if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX) + else gen.mkAsInstanceOf(REF(b), tpX, true, false) + } + + // duplicated out of frustration with cast generation + def mkZero(tp: Type): Tree = { + tp.typeSymbol match { + case UnitClass => Literal(Constant()) + case BooleanClass => Literal(Constant(false)) + case FloatClass => Literal(Constant(0.0f)) + case DoubleClass => Literal(Constant(0.0d)) + case ByteClass => Literal(Constant(0.toByte)) + case ShortClass => Literal(Constant(0.toShort)) + case IntClass => Literal(Constant(0)) + case LongClass => Literal(Constant(0L)) + case CharClass => Literal(Constant(0.toChar)) + case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here + } + } + } + } + + trait PureMatchMonadInterface extends MatchMonadInterface { + val matchStrategy: Tree + + def inMatchMonad(tp: Type): Type = appliedType(oneSig, List(tp)).finalResultType + def pureType(tp: Type): Type = appliedType(oneSig, List(tp)).paramTypes.head + protected def matchMonadSym = oneSig.finalResultType.typeSymbol + + import CODE._ + def __match(n: Name): SelectStart = matchStrategy DOT n + + private lazy val oneSig: Type = + typer.typed(__match(vpmName.one), EXPRmode | POLYmode | TAPPmode | FUNmode, WildcardType).tpe // TODO: error message + } + + trait PureCodegen extends CodegenCore with PureMatchMonadInterface { + def codegen: AbsCodegen = pureCodegen + + object pureCodegen extends CommonCodegen { import CODE._ + //// methods in MatchingStrategy (the monad companion) -- used directly in translation + // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`) + def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree + = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher)) + // __match.one(`res`) + def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res) + // __match.zero + def zero: Tree = __match(vpmName.zero) + // __match.guard(`c`, `then`) + def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then) + + //// methods in the monad instance -- used directly in translation + // `prev`.flatMap(`b` => `next`) + def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next)) + // `thisCase`.orElse(`elseCase`) + def 
typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase) + // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`) + def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next) + // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`) + def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next) + } + } + + +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// OPTIMIZATIONS +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// +// decisions, decisions +/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// + + trait TreeMakerApproximation extends TreeMakers { self: CodegenCore => + object Test { + var currId = 0 + } + case class Test(cond: Cond, treeMaker: TreeMaker) { + // def <:<(other: Test) = cond <:< other.cond + // def andThen_: (prev: List[Test]): List[Test] = + // prev.filterNot(this <:< _) :+ this + + private val reusedBy = new collection.mutable.HashSet[Test] + var reuses: Option[Test] = None + def registerReuseBy(later: Test): Unit = { + assert(later.reuses.isEmpty, later.reuses) + reusedBy += later + later.reuses = Some(this) + } + + val id = { Test.currId += 1; Test.currId} + override def toString = + if (cond eq Top) "T" + else if(cond eq Havoc) "!?" + else "T"+ id + (if(reusedBy nonEmpty) "!["+ treeMaker +"]" else (if(reuses.isEmpty) "["+ treeMaker +"]" else " cf. T"+reuses.get.id)) + } + + object Cond { + // def refines(self: Cond, other: Cond): Boolean = (self, other) match { + // case (Bottom, _) => true + // case (Havoc , _) => true + // case (_ , Top) => true + // case (_ , _) => false + // } + var currId = 0 + } + + abstract class Cond { + // def testedPath: Tree + // def <:<(other: Cond) = Cond.refines(this, other) + + val id = { Cond.currId += 1; Cond.currId} + } + + // does not contribute any knowledge + case object Top extends Cond + + // takes away knowledge. e.g., a user-defined guard + case object Havoc extends Cond + + // we know everything! everything! 
+ // this either means the case is unreachable, + // or that it is statically known to be picked -- at this point in the decision tree --> no point in emitting further alternatives + // case object Bottom extends Cond + + + object EqualityCond { + private val uniques = new collection.mutable.HashMap[(Tree, Tree), EqualityCond] + def apply(testedPath: Tree, rhs: Tree): EqualityCond = uniques getOrElseUpdate((testedPath, rhs), new EqualityCond(testedPath, rhs)) + } + class EqualityCond(testedPath: Tree, rhs: Tree) extends Cond { // def negation = TopCond // inequality doesn't teach us anything // do simplification when we know enough about the tree statically: // - collapse equal trees @@ -951,27 +1233,16 @@ class Foo(x: Other) { x._1 } // no error in this order override def toString = testedPath +" (<: && ==) "+ pt +"#"+ id } -//// CSE - - /** a flow-sensitive, generalised, common sub-expression elimination - * reuse knowledge from performed tests - * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality) - * when a sub-expression is share, it is stored in a mutable variable - * the variable is floated up so that its scope includes all of the program that shares it - * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree) - * - * intended to be generalised to exhaustivity/reachability checking - */ - def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { + def approximateMatch(root: Symbol, cases: List[List[TreeMaker]]): List[List[Test]] = { // a variable in this set should never be replaced by a tree that "does not consist of a selection on a variable in this set" (intuitively) - val pointsToBound = collection.mutable.HashSet(prevBinder) + val pointsToBound = collection.mutable.HashSet(root) // the substitution that renames variables to variables in pointsToBound var normalize: Substitution = EmptySubstitution // replaces a variable (in pointsToBound) by a selection on another variable in pointsToBound // TODO check: - // pointsToBound -- accumSubst.from == Set(prevBinder) && (accumSubst.from.toSet -- pointsToBound) isEmpty + // pointsToBound -- accumSubst.from == Set(root) && (accumSubst.from.toSet -- pointsToBound) isEmpty var accumSubst: Substitution = EmptySubstitution val trees = new collection.mutable.HashSet[Tree] @@ -1032,7 +1303,23 @@ class Foo(x: Other) { x._1 } // no error in this order }, tm) } - val testss = cases.map { _ map approximateTreeMaker } + cases.map { _ map approximateTreeMaker } + } + } + +//// + trait CommonSubconditionElimination extends TreeMakerApproximation { self: OptimizedCodegen => + /** a flow-sensitive, generalised, common sub-expression elimination + * reuse knowledge from performed tests + * the only sub-expressions we consider are the conditions and results of the three tests (type, type&equality, equality) + * when a sub-expression is share, it is stored in a mutable variable + * the variable is floated up so that its scope includes all of the program that shares it + * we generalize sharing to implication, where b reuses a if a => b and priors(a) => priors(b) (the priors of a sub expression form the path through the decision tree) + * + * intended to be generalised to exhaustivity/reachability checking + */ + def doCSE(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = { + val testss = 
approximateMatch(prevBinder, cases) // interpret: val dependencies = new collection.mutable.LinkedHashMap[Test, Set[Cond]] @@ -1109,8 +1396,8 @@ class Foo(x: Other) { x._1 } // no error in this order } // TODO: finer-grained duplication - def chainBefore(next: Tree, pt: Type): Tree = - atPos(pos)(codegenOpt.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) + def chainBefore(next: Tree, pt: Type): Tree = // assert(codegen eq optimizedCodegen) + atPos(pos)(optimizedCodegen.flatMapCondStored(cond, storedCond, res, nextBinder, substitution(next).duplicate)) } case class ReusingCondTreeMaker(sharedPrefix: List[Test], toReused: TreeMaker => TreeMaker) extends TreeMaker { import CODE._ @@ -1135,10 +1422,11 @@ class Foo(x: Other) { x._1 } // no error in this order ) ELSE codegen.zero } } + } -//// DCE - + //// DCE + trait DeadCodeElimination extends TreeMakers { self: CodegenCore => // TODO: non-trivial dead-code elimination // e.g., the following match should compile to a simple instanceof: // case class Ident(name: String) @@ -1147,10 +1435,10 @@ class Foo(x: Other) { x._1 } // no error in this order // do minimal DCE cases } + } - -//// SWITCHES - + //// SWITCHES + trait SwitchEmission extends TreeMakers with OptimizedMatchMonadInterface { self: CodegenCore => object SwitchablePattern { def unapply(pat: Tree) = pat match { case Literal(Constant((_: Byte ) | (_: Short) | (_: Int ) | (_: Char ))) => true // TODO: Java 7 allows strings in switches case _ => false @@ -1167,7 +1455,7 @@ class Foo(x: Other) { x._1 } // no error in this order private val switchableTpes = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe) - def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = if (!optimizingCodeGen) None else { + override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { def sequence[T](xs: List[Option[T]]): Option[List[T]] = if (xs exists (_.isEmpty)) None else Some(xs.flatten) @@ -1208,7 +1496,7 @@ class Foo(x: Other) { x._1 } // no error in this order } if (!isSwitchableTpe(scrut.tpe)) - None + None // TODO: emit a cast of the scrutinee and a switch on the cast scrutinee if patterns allow switch but the type of the scrutinee doesn't else { sequence(caseDefs) map { caseDefs => import CODE._ @@ -1238,216 +1526,27 @@ class Foo(x: Other) { x._1 } // no error in this order } } } - - def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): List[List[TreeMaker]] = - doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt) - - - def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) - - // a foldLeft to accumulate the localSubstitution left-to-right - // it drops SubstOnly tree makers, since their only goal in life is to propagate substitutions to the next tree maker, which is fullfilled by propagateSubstitution - def propagateSubstitution(treeMakers: List[TreeMaker], initial: Substitution): List[TreeMaker] = { - var accumSubst: Substitution = initial - treeMakers foreach { maker => - maker incorporateOuterSubstitution accumSubst - accumSubst = maker.substitution - } - removeSubstOnly(treeMakers) - } - - // calls propagateSubstitution on the treemakers - def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = fixerUpper(owner, scrut.pos){ - val casesUnOpt = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops 
SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them - - emitSwitch(scrut, scrutSym, casesUnOpt, pt).getOrElse{ - var toHoist = List[Tree]() - val (matcher, hasDefault) = - if (casesUnOpt nonEmpty) { - // when specified, need to propagate pt explicitly (type inferencer can't handle it) - val optPt = - if (isFullyDefined(pt)) inMatchMonad(pt) - else NoType - - // do this check on casesUnOpt, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one - // exhaustivity and reachability must be checked before optimization as well - val hasDefault = casesUnOpt.nonEmpty && { - val nonTrivLast = casesUnOpt.last - nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] - } - - val cases = - if (optimizingCodeGen) optimizeCases(scrutSym, casesUnOpt, pt) - else casesUnOpt - - val combinedCases = - cases.map(combineExtractors(_, pt)).reduceLeft(codegen.typedOrElse(optPt)) - - toHoist = ( - for (treeMakers <- cases) - yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} - ).flatten.flatten.toList - - (combinedCases, hasDefault) - } else (codegen.zero, false) - - val expr = codegen.runOrElse(scrut, scrutSym, matcher, if (isFullyDefined(pt)) pt else NoType, hasDefault) - if (toHoist isEmpty) expr - else Block(toHoist, expr) - } - } - - // combineExtractors changes the current substitution's of the tree makers in `treeMakers` - // requires propagateSubstitution(treeMakers) has been called - def combineExtractors(treeMakers: List[TreeMaker], pt: Type): Tree = - treeMakers.foldRight (EmptyTree: Tree) (_.chainBefore(_, pt)) - - // TODO: do this during tree construction, but that will require tracking the current owner in treemakers - // TODO: assign more fine-grained positions - // fixes symbol nesting, assigns positions - private def fixerUpper(origOwner: Symbol, pos: Position) = new Traverser { - currentOwner = origOwner - - override def traverse(t: Tree) { - if (t != EmptyTree && t.pos == NoPosition) { - t.setPos(pos) - } - t match { - case Function(_, _) if t.symbol == NoSymbol => - t.symbol = currentOwner.newAnonymousFunctionValue(t.pos) - // println("new symbol for "+ (t, t.symbol.ownerChain)) - case Function(_, _) if (t.symbol.owner == NoSymbol) || (t.symbol.owner == origOwner) => - // println("fundef: "+ (t, t.symbol.ownerChain, currentOwner.ownerChain)) - t.symbol.owner = currentOwner - case d : DefTree if (d.symbol != NoSymbol) && ((d.symbol.owner == NoSymbol) || (d.symbol.owner == origOwner)) => // don't indiscriminately change existing owners! (see e.g., pos/t3440, pos/t3534, pos/unapplyContexts2) - // println("def: "+ (d, d.symbol.ownerChain, currentOwner.ownerChain)) - if(d.symbol.isLazy) { // for lazy val's accessor -- is there no tree?? 
- assert(d.symbol.lazyAccessor != NoSymbol && d.symbol.lazyAccessor.owner == d.symbol.owner, d.symbol.lazyAccessor) - d.symbol.lazyAccessor.owner = currentOwner - } - if(d.symbol.moduleClass ne NoSymbol) - d.symbol.moduleClass.owner = currentOwner - - d.symbol.owner = currentOwner - // case _ if (t.symbol != NoSymbol) && (t.symbol ne null) => - // println("untouched "+ (t, t.getClass, t.symbol.ownerChain, currentOwner.ownerChain)) - case _ => - } - super.traverse(t) - } - - // override def apply - // println("before fixerupper: "+ xTree) - // currentRun.trackerFactory.snapshot() - // println("after fixerupper") - // currentRun.trackerFactory.snapshot() - } - -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// substitution -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - object Substitution { - def apply(from: Symbol, to: Tree) = new Substitution(List(from), List(to)) - // requires sameLength(from, to) - def apply(from: List[Symbol], to: List[Tree]) = - if (from nonEmpty) new Substitution(from, to) else EmptySubstitution - } - - class Substitution(val from: List[Symbol], val to: List[Tree]) { - def apply(tree: Tree): Tree = typedSubst(tree, from, to) - - // the substitution that chains `other` before `this` substitution - // forall t: Tree. this(other(t)) == (this >> other)(t) - def >>(other: Substitution): Substitution = { - val (fromFiltered, toFiltered) = (from, to).zipped filter { (f, t) => !other.from.contains(f) } - new Substitution(other.from ++ fromFiltered, other.to.map(apply) ++ toFiltered) // a quick benchmarking run indicates the `.map(apply)` is not too costly - } - override def toString = (from zip to) mkString("Substitution(", ", ", ")") - } - - object EmptySubstitution extends Substitution(Nil, Nil) { - override def apply(tree: Tree): Tree = tree - override def >>(other: Substitution): Substitution = other - } - - - def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree - def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x"): Symbol - def typeAndEqualityTest(patBinder: Symbol, pt: Type): Tree - def typeTest(binder: Symbol, pt: Type): Tree - - // codegen relevant to the structure of the translation (how extractors are combined) - trait AbsCodeGen { - def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree - def one(res: Tree, bodyPt: Type, matchPt: Type): Tree - def zero: Tree - def flatMap(prev: Tree, b: Symbol, next: Tree): Tree - def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree - - def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree - def flatMapGuard(cond: Tree, next: Tree): Tree - - def fun(arg: Symbol, body: Tree): Tree - def condOptimized(c: Tree, then: Tree): Tree - def _equals(checker: Tree, binder: Symbol): Tree - def _asInstanceOf(b: Symbol, tp: Type): Tree - def mkZero(tp: Type): Tree - - def tupleSel(binder: Symbol)(i: Int): Tree - def index(tgt: Tree)(i: Int): Tree - def drop(tgt: Tree)(n: Int): Tree - def and(a: Tree, b: Tree): Tree - def _isInstanceOf(b: Symbol, tp: Type): Tree - } - - trait AbsOptimizedCodeGen extends AbsCodeGen { - def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree - } - - def codegen: AbsCodeGen - def codegenOpt: AbsOptimizedCodeGen = 
codegen.asInstanceOf[AbsOptimizedCodeGen] - - def typed(tree: Tree, mode: Int, pt: Type): Tree // implemented in MatchTranslator } -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// -// generate actual trees -/////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////////// - - trait MatchCodeGen extends TreeMakers { - lazy val codegen: AbsCodeGen = if (optimizingCodeGen) new OptimizedCodeGen else new NaiveCodeGen - - import CODE._ + trait OptimizedMatchMonadInterface extends MatchMonadInterface { + override def inMatchMonad(tp: Type): Type = optionType(tp) + override def pureType(tp: Type): Type = tp + override protected def matchMonadSym = OptionClass + } - class NaiveCodeGen extends CommonCodeGen { - //// methods in MatchingStrategy (the monad companion) -- used directly in translation - // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`) - def runOrElse(scrut: Tree, scrutSym: Symbol, matcher: Tree, resTp: Type, hasDefault: Boolean): Tree - = __match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, matcher)) - // __match.one(`res`) - def one(res: Tree, bodyPt: Type, matchPt: Type): Tree = (__match(vpmName.one)) (res) - // __match.zero - def zero: Tree = __match(vpmName.zero) - // __match.guard(`c`, `then`) - def guard(c: Tree, then: Tree, tp: Type): Tree = __match(vpmName.guard) APPLY (c, then) + trait OptimizedCodegen extends CodegenCore with TypedSubstitution with OptimizedMatchMonadInterface { + override def codegen: AbsCodegen = optimizedCodegen - //// methods in the monad instance -- used directly in translation - // `prev`.flatMap(`b` => `next`) - def flatMap(prev: Tree, b: Symbol, next: Tree): Tree = (prev DOT vpmName.flatMap)(fun(b, next)) - // `thisCase`.orElse(`elseCase`) - def typedOrElse(pt: Type)(thisCase: Tree, elseCase: Tree): Tree = (thisCase DOT vpmName.orElse) APPLY (elseCase) - // __match.guard(`cond`, `res`).flatMap(`nextBinder` => `next`) - def flatMapCond(cond: Tree, res: Tree, nextBinder: Symbol, nextBinderTp: Type, next: Tree): Tree = flatMap(guard(cond, res, nextBinderTp), nextBinder, next) - // __match.guard(`guardTree`, ()).flatMap((_: P[Unit]) => `next`) - def flatMapGuard(guardTree: Tree, next: Tree): Tree = flatMapCond(guardTree, CODE.UNIT, freshSym(guardTree.pos, pureType(UnitClass.tpe)), pureType(UnitClass.tpe), next) - } + // trait AbsOptimizedCodegen extends AbsCodegen { + // def flatMapCondStored(cond: Tree, condSym: Symbol, res: Tree, nextBinder: Symbol, next: Tree): Tree + // } + // def optimizedCodegen: AbsOptimizedCodegen // when we know we're targetting Option, do some inlining the optimizer won't do // for example, `o.flatMap(f)` becomes `if(o == None) None else f(o.get)`, similarly for orElse and guard // this is a special instance of the advanced inlining optimization that takes a method call on // an object of a type that only has two concrete subclasses, and inlines both bodies, guarded by an if to distinguish the two cases - class OptimizedCodeGen extends CommonCodeGen with AbsOptimizedCodeGen { + object optimizedCodegen extends CommonCodegen /*with AbsOptimizedCodegen*/ { import CODE._ lazy val zeroSym = freshSym(NoPosition, optionType(NothingClass.tpe), "zero") /** Inline runOrElse and get rid of Option allocations @@ -1493,7 +1592,7 @@ class Foo(x: Other) { x._1 } // no error in this order BLOCK( VAL(prevSym) === prev, - IF 
(prevSym DOT isEmpty) THEN zero ELSE typedSubst(next, List(b), List(prevSym DOT get)) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor) + IF (prevSym DOT isEmpty) THEN zero ELSE Substitution(b, prevSym DOT get)(next) // must be isEmpty and get as we don't control the target of the call (could be the result of a user-defined extractor) ) } @@ -1520,91 +1619,20 @@ class Foo(x: Other) { x._1 } // no error in this order def flatMapGuard(guardTree: Tree, next: Tree): Tree = IF (guardTree) THEN next ELSE zero } + } - @inline private def typedIfOrigTyped(to: Tree, origTp: Type): Tree = - if (origTp == null || origTp == NoType) to - // important: only type when actually substing and when original tree was typed - // (don't need to use origTp as the expected type, though, and can't always do this anyway due to unknown type params stemming from polymorphic extractors) - else typed(to, EXPRmode, WildcardType) - - // We must explicitly type the trees that we replace inside some other tree, since the latter may already have been typed, - // and will thus not be retyped. This means we might end up with untyped subtrees inside bigger, typed trees. - def typedSubst(tree: Tree, from: List[Symbol], to: List[Tree]): Tree = { - // according to -Ystatistics 10% of translateMatch's time is spent in this method... - // since about half of the typedSubst's end up being no-ops, the check below shaves off 5% of the time spent in typedSubst - if (!tree.exists { case i@Ident(_) => from contains i.symbol case _ => false}) tree - else (new Transformer { - override def transform(tree: Tree): Tree = { - def subst(from: List[Symbol], to: List[Tree]): Tree = - if (from.isEmpty) tree - else if (tree.symbol == from.head) typedIfOrigTyped(to.head.shallowDuplicate, tree.tpe) - else subst(from.tail, to.tail) - - tree match { - case Ident(_) => subst(from, to) - case _ => super.transform(tree) - } - } - }).transform(tree) - } - - var ctr = 0 - def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = {ctr += 1; - // assert(owner ne null) - // assert(owner ne NoSymbol) - NoSymbol.newTermSymbol(vpmName.counted(prefix, ctr), pos) setInfo repackExistential(tp) - } - - def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match { - case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args) - case _ => tp - } - - - def typesConform(tp: Type, pt: Type) = ((tp eq pt) || (tp <:< pt)) - - abstract class CommonCodeGen extends AbsCodeGen { - def fun(arg: Symbol, body: Tree): Tree = Function(List(ValDef(arg)), body) - def genTypeApply(tfun: Tree, args: Type*): Tree = if(args contains NoType) tfun else TypeApply(tfun, args.toList map TypeTree) - def tupleSel(binder: Symbol)(i: Int): Tree = (REF(binder) DOT nme.productAccessorName(i)) // make tree that accesses the i'th component of the tuple referenced by binder - def index(tgt: Tree)(i: Int): Tree = tgt APPLY (LIT(i)) - def drop(tgt: Tree)(n: Int): Tree = (tgt DOT vpmName.drop) (LIT(n)) - def _equals(checker: Tree, binder: Symbol): Tree = checker MEMBER_== REF(binder) // NOTE: checker must be the target of the ==, that's the patmat semantics for ya - def and(a: Tree, b: Tree): Tree = a AND b - def condOptimized(c: Tree, then: Tree): Tree = IF (c) THEN then ELSE zero - - // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) - def _asInstanceOf(t: Tree, tp: 
Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp) - if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX) - else gen.mkAsInstanceOf(t, tpX, true, false) - } - - def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false) - // { val tpX = repackExistential(tp) - // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } - // else gen.mkIsInstanceOf(REF(b), tpX, true, false) - // } - - def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp) - if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX) - else gen.mkAsInstanceOf(REF(b), tpX, true, false) - } - // duplicated out of frustration with cast generation - def mkZero(tp: Type): Tree = { - tp.typeSymbol match { - case UnitClass => Literal(Constant()) - case BooleanClass => Literal(Constant(false)) - case FloatClass => Literal(Constant(0.0f)) - case DoubleClass => Literal(Constant(0.0d)) - case ByteClass => Literal(Constant(0.toByte)) - case ShortClass => Literal(Constant(0.toShort)) - case IntClass => Literal(Constant(0)) - case LongClass => Literal(Constant(0L)) - case CharClass => Literal(Constant(0.toChar)) - case _ => gen.mkAsInstanceOf(Literal(Constant(null)), tp, any = true, wrapInApply = false) // the magic incantation is true/false here - } - } + trait MatchOptimizations extends CommonSubconditionElimination + with DeadCodeElimination + with SwitchEmission + with OptimizedCodegen { self: TreeMakers => + override def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = { + val optCases = doCSE(prevBinder, doDCE(prevBinder, cases, pt), pt) + val toHoist = ( + for (treeMakers <- optCases) + yield treeMakers.collect{case tm: ReusedCondTreeMaker => tm.treesToHoist} + ).flatten.flatten.toList + (optCases, toHoist) } } } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d3ff331f98..84f1d1ed6f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3297,7 +3297,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val owntype = elimAnonymousClass(owntype0) if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype)) - (new MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match { + (MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match { case Block(vd :: Nil, tree@Match(selector, cases)) => val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) var cases1 = typedCases(tree, cases, packCaptured(selector1.tpe.widen), pt) -- cgit v1.2.3 From f411950e43fbf56306f96bbc8803ba278d25f440 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Feb 2012 13:13:55 -0800 Subject: matchesType fixup. 
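For context, the member signatures whose types are compared by matchesType can be sketched as follows (an illustrative Scala fragment -- the trait and member names are invented and are not part of this patch):

  trait Shapes {
    def a: Int          // NullaryMethodType(Int)
    def b(): Int        // MethodType(Nil, Int)
    def c[T](x: T): T   // PolyType(List(T), MethodType(List(x), T))
  }

As the diff below shows, a type no longer matches a PolyType merely because that PolyType has an empty type-parameter list, and the module-class TypeRef cases now compare against the original tp1/tp2 rather than going through sym.tpe.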
--- src/compiler/scala/reflect/internal/Types.scala | 12 ++++++------ 1 file changed, 6 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index c8b960ebe8..2c01008efe 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -5459,8 +5459,8 @@ trait Types extends api.Types { self: SymbolTable => matchesType(tp1, res2, true) case MethodType(_, _) => false - case PolyType(tparams2, res2) => - tparams2.isEmpty && matchesType(tp1, res2, alwaysMatchSimple) + case PolyType(_, _) => + false case _ => alwaysMatchSimple || tp1 =:= tp2 } @@ -5478,7 +5478,7 @@ trait Types extends api.Types { self: SymbolTable => case ExistentialType(_, res2) => alwaysMatchSimple && matchesType(tp1, res2, true) case TypeRef(_, sym, Nil) => - params1.isEmpty && sym.isModuleClass && matchesType(res1, sym.tpe, alwaysMatchSimple) + params1.isEmpty && sym.isModuleClass && matchesType(res1, tp2, alwaysMatchSimple) case _ => false } @@ -5491,7 +5491,7 @@ trait Types extends api.Types { self: SymbolTable => case ExistentialType(_, res2) => alwaysMatchSimple && matchesType(tp1, res2, true) case TypeRef(_, sym, Nil) if sym.isModuleClass => - matchesType(res1, sym.tpe, alwaysMatchSimple) + matchesType(res1, tp2, alwaysMatchSimple) case _ => matchesType(res1, tp2, alwaysMatchSimple) } @@ -5514,8 +5514,8 @@ trait Types extends api.Types { self: SymbolTable => } case TypeRef(_, sym, Nil) if sym.isModuleClass => tp2 match { - case MethodType(Nil, res2) => matchesType(sym.tpe, res2, alwaysMatchSimple) - case NullaryMethodType(res2) => matchesType(sym.tpe, res2, alwaysMatchSimple) + case MethodType(Nil, res2) => matchesType(tp1, res2, alwaysMatchSimple) + case NullaryMethodType(res2) => matchesType(tp1, res2, alwaysMatchSimple) case _ => lastTry } case _ => -- cgit v1.2.3 From 355792e85c5b237e5defa6c156336bfbcbf57223 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Feb 2012 14:41:13 -0800 Subject: Extremely hacky tweak to deal with printf portability failure. No leading zeros for %016s on some platforms, yes on others. --- tools/get-scala-revision | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/tools/get-scala-revision b/tools/get-scala-revision index 8d48c8cb78..eac512a010 100755 --- a/tools/get-scala-revision +++ b/tools/get-scala-revision @@ -16,9 +16,12 @@ tag=$(git describe --abbrev=0) # the full string - padding correctness depends on abbrev=10. described=$(git describe --abbrev=10 --always --tags) +suffix="${described##${tag}-}" +counter=$(echo $suffix | cut -d - -f 1) +hash=$(echo $suffix | cut -d - -f 2) # 016 is rocket-surgically-calibrated to pad the distance from the # tag to the current commit into a 4-digit number - since maven # will be treating this as a string, the ide depends on # 10 being greater than 9 (thus 0010 and 00009.) -printf "%s-%016s-%s\n" "$tag" "${described##${tag}-}" $(date "+%Y-%m-%d") +printf "%s-%04d-%10s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d") -- cgit v1.2.3 From 7b153ed4927c171d1e219a5c1d98096f104cc0fe Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Feb 2012 16:59:04 -0800 Subject: Create a reference tag if none exists. 
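Together with the preceding printf change, the intent is that get-scala-revision emits version strings of the form <tag>-<zero-padded commit distance>-<abbreviated hash>-<date>, so that plain string comparison orders a later build after an earlier one. A minimal Scala sketch of the formatting step (illustrative only -- the object name, method and example values are invented; the real logic is the shell script in the diff below):

  object GetScalaRevisionSketch {
    // The %04d zero-padding is what makes lexicographic comparison agree
    // with the number of commits since the tag (e.g. "0010" > "0009").
    def format(tag: String, commitsSinceTag: Int, abbrevHash: String, date: String): String =
      "%s-%04d-%10s-%s".format(tag, commitsSinceTag, abbrevHash, date)
  }

  // e.g. GetScalaRevisionSketch.format("v2.10.0-M1", 9, "g1234abcd56", "2012-02-02")
  // yields "v2.10.0-M1-0009-g1234abcd56-2012-02-02"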
--- tools/get-scala-revision | 13 +++++++++++-- 1 file changed, 11 insertions(+), 2 deletions(-) diff --git a/tools/get-scala-revision b/tools/get-scala-revision index eac512a010..8441b29070 100755 --- a/tools/get-scala-revision +++ b/tools/get-scala-revision @@ -10,12 +10,21 @@ [[ $# -eq 0 ]] || cd "$1" +ensure_tag () { + # v2.10.0-M1 + fallback=58cb15c40d + [[ -n $(git tag -l v2.10.0-M1) ]] || { + git tag -a -m "generated by get-scala-revision" v2.10.0-M1 $fallback + } +} +ensure_tag + # the closest tag, obtained separately because we have to # reconstruct the string around the padded distance. -tag=$(git describe --abbrev=0) +tag=$(git describe --match 'v2*' --abbrev=0) # the full string - padding correctness depends on abbrev=10. -described=$(git describe --abbrev=10 --always --tags) +described=$(git describe --match 'v2*' --abbrev=10) suffix="${described##${tag}-}" counter=$(echo $suffix | cut -d - -f 1) hash=$(echo $suffix | cut -d - -f 2) -- cgit v1.2.3 From 52e7fdc7efaa4ecfd68f71a51375e1870cc78dcc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 5 Jan 2012 23:33:17 -0800 Subject: Misc optimizations with zip. --- src/compiler/scala/reflect/internal/Types.scala | 2 +- src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala | 3 +-- src/compiler/scala/tools/nsc/typechecker/RefChecks.scala | 4 +--- 3 files changed, 3 insertions(+), 6 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 371fb8d585..4e842c05da 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -5687,7 +5687,7 @@ trait Types extends api.Types { self: SymbolTable => val padded = sorted map (_._2.padTo(maxSeqLength, NoType)) val transposed = padded.transpose - val columns: List[Column[List[Type]]] = sorted.zipWithIndex map { + val columns: List[Column[List[Type]]] = mapWithIndex(sorted) { case ((k, v), idx) => Column(str(k), (xs: List[Type]) => str(xs(idx)), true) } diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 4a104857db..4012d08e42 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -1711,8 +1711,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def makeArguments(fun: Symbol, vparams: List[Symbol]): List[Tree] = ( //! 
TODO: make sure the param types are seen from the right prefix - for ((tp, arg) <- fun.info.paramTypes zip vparams) yield - gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe) + map2(fun.info.paramTypes, vparams)((tp, arg) => gen.maybeMkAsInstanceOf(Ident(arg), tp, arg.tpe)) ) private def findSpec(tp: Type): Type = tp match { case TypeRef(pre, sym, _ :: _) => specializedType(tp) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index a6c2f75d5e..fa4664b34f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -924,9 +924,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R } def validateVarianceArgs(tps: List[Type], variance: Int, tparams: List[Symbol]) { - (tps zip tparams) foreach { - case (tp, tparam) => validateVariance(tp, variance * tparam.variance) - } + foreach2(tps, tparams)((tp, tparam) => validateVariance(tp, variance * tparam.variance)) } validateVariance(base.info, CoVariance) -- cgit v1.2.3 From f54c2758091c1988ea5e44a3ccbc3b7b9fdddad7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Feb 2012 20:48:50 -0800 Subject: Cleanups in classfile parser symbol creation. --- .../nsc/symtab/classfile/ClassfileParser.scala | 36 +++++++++++----------- 1 file changed, 18 insertions(+), 18 deletions(-) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index ac6dca4422..9c0670e981 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -238,9 +238,9 @@ abstract class ClassfileParser { val index = in.getChar(start + 1) val name = getExternalName(in.getChar(starts(index) + 1)) //assert(name.endsWith("$"), "Not a module class: " + name) - f = forceMangledName(name.subName(0, name.length - 1), true) + f = forceMangledName(name dropRight 1, true) if (f == NoSymbol) - f = definitions.getModule(name.subName(0, name.length - 1)) + f = definitions.getModule(name dropRight 1) } else { val origName = nme.originalName(name) val owner = if (static) ownerTpe.typeSymbol.linkedClassOfClass else ownerTpe.typeSymbol @@ -1074,27 +1074,27 @@ abstract class ClassfileParser { } def enterClassAndModule(entry: InnerClassEntry, completer: global.loaders.SymbolLoader, jflags: Int) { - val name = entry.originalName - var sflags = toScalaClassFlags(jflags) + val name = entry.originalName + var sflags = toScalaClassFlags(jflags) + val owner = getOwner(jflags) + val scope = getScope(jflags) + val innerClass = owner.newClass(name.toTypeName, NoPosition, sflags) setInfo completer + val innerModule = owner.newModule(name.toTermName, NoPosition, sflags) setInfo completer - val innerClass = getOwner(jflags).newClass(name.toTypeName).setInfo(completer).setFlag(sflags) - val innerModule = getOwner(jflags).newModule(name.toTermName).setInfo(completer).setFlag(sflags) innerModule.moduleClass setInfo global.loaders.moduleClassLoader - - getScope(jflags) enter innerClass - getScope(jflags) enter innerModule + scope enter innerClass + scope enter innerModule val decls = innerClass.enclosingPackage.info.decls - val e = decls.lookupEntry(className(entry.externalName)) - if (e ne null) { - //println("removing " + e) - decls.unlink(e) - } - val e1 = decls.lookupEntry(className(entry.externalName).toTypeName) - if (e1 ne null) { - 
//println("removing " + e1) - decls.unlink(e1) + def unlinkIfPresent(name: Name) = { + val e = decls lookupEntry name + if (e ne null) + decls unlink e } + + val cName = className(entry.externalName) + unlinkIfPresent(cName.toTermName) + unlinkIfPresent(cName.toTypeName) } for (entry <- innerClasses.values) { -- cgit v1.2.3 From a1e2a94da3002af88c9e5cdb56de3f8da9b8023a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 2 Feb 2012 20:34:31 -0800 Subject: Updated Flags toString/documentation. In a stunningly unusual demonstration of farsightedness, I was able to generate these changes only by running: scala scala.tools.nsc.util.FlagsUtilCompiler With this much time in between runs: -// Generated by mkFlagsTable() at Mon Oct 11 10:01:09 PDT 2010 +// Generated by mkFlagsTable() at Thu Feb 02 20:31:52 PST 2012 --- src/compiler/scala/reflect/internal/Flags.scala | 36 ++++++++++++------------- test/files/buildmanager/t2562/t2562.check | 10 +++---- test/files/buildmanager/t2649/t2649.check | 4 +-- test/files/buildmanager/t2651_4/t2651_4.check | 4 +-- test/files/buildmanager/t2652/t2652.check | 4 +-- test/files/buildmanager/t2653/t2653.check | 2 +- test/files/buildmanager/t2655/t2655.check | 4 +-- test/files/buildmanager/t2657/t2657.check | 4 +-- test/files/buildmanager/t2789/t2789.check | 4 +-- 9 files changed, 36 insertions(+), 36 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala index 9e4f0431c3..66af92be5f 100644 --- a/src/compiler/scala/reflect/internal/Flags.scala +++ b/src/compiler/scala/reflect/internal/Flags.scala @@ -13,7 +13,7 @@ import scala.collection.{ mutable, immutable } // Parsers/JavaParsers and therefore definitely appear on Modifiers; but the // absence of /M on the other flags does not imply they aren't. // -// Generated by mkFlagsTable() at Mon Oct 11 10:01:09 PDT 2010 +// Generated by mkFlagsTable() at Thu Feb 02 20:31:52 PST 2012 // // 0: PROTECTED/M // 1: OVERRIDE/M @@ -30,7 +30,7 @@ import scala.collection.{ mutable, immutable } // 12: MUTABLE/M // 13: PARAM/M // 14: PACKAGE -// 15: +// 15: MACRO/M // 16: BYNAMEPARAM/M CAPTURED COVARIANT/M // 17: CONTRAVARIANT/M INCONSTRUCTOR LABEL // 18: ABSOVERRIDE/M @@ -59,13 +59,13 @@ import scala.collection.{ mutable, immutable } // 41: DEFAULTINIT/M // 42: VBRIDGE // 43: VARARGS -// 44: +// 44: TRIEDCOOKING // 45: // 46: // 47: // 48: -// 49: latePRIVATE (eliminated) -// 50: lateABSTRACT (eliminated) +// 49: +// 50: // 51: lateDEFERRED // 52: lateFINAL // 53: lateMETHOD @@ -74,10 +74,10 @@ import scala.collection.{ mutable, immutable } // 56: notPROTECTED // 57: notOVERRIDE // 58: notPRIVATE -// 59: notABSTRACT (eliminated) -// 60: notDEFERRED (eliminated) -// 61: notFINAL (eliminated) -// 62: notMETHOD (eliminated) +// 59: +// 60: +// 61: +// 62: // 63: /** Flags set on Modifiers instances in the parsing stage. 
@@ -337,7 +337,7 @@ class Flags extends ModifierFlags { // ------ displaying flags -------------------------------------------------------- - // Generated by mkFlagToStringMethod() at Mon Oct 11 10:12:36 PDT 2010 + // Generated by mkFlagToStringMethod() at Thu Feb 02 20:31:52 PST 2012 @annotation.switch override def flagToString(flag: Long): String = flag match { case PROTECTED => "protected" // (1L << 0) case OVERRIDE => "override" // (1L << 1) @@ -354,7 +354,7 @@ class Flags extends ModifierFlags { case MUTABLE => "" // (1L << 12) case PARAM => "" // (1L << 13) case PACKAGE => "" // (1L << 14) - case MACRO => "macro" // (1L << 15) + case MACRO => "" // (1L << 15) case BYNAMEPARAM => "" // (1L << 16) case CONTRAVARIANT => "" // (1L << 17) case ABSOVERRIDE => "absoverride" // (1L << 18) @@ -383,13 +383,13 @@ class Flags extends ModifierFlags { case DEFAULTINIT => "" // (1L << 41) case VBRIDGE => "" // (1L << 42) case VARARGS => "" // (1L << 43) - case 0x100000000000L => "" // (1L << 44) + case TRIEDCOOKING => "" // (1L << 44) case 0x200000000000L => "" // (1L << 45) case 0x400000000000L => "" // (1L << 46) case 0x800000000000L => "" // (1L << 47) case 0x1000000000000L => "" // (1L << 48) - // case `latePRIVATE` => "" // (1L << 49) - // case `lateABSTRACT` => "" // (1L << 50) + case 0x2000000000000L => "" // (1L << 49) + case 0x4000000000000L => "" // (1L << 50) case `lateDEFERRED` => "" // (1L << 51) case `lateFINAL` => "" // (1L << 52) case `lateMETHOD` => "" // (1L << 53) @@ -398,10 +398,10 @@ class Flags extends ModifierFlags { case `notPROTECTED` => "" // (1L << 56) case `notOVERRIDE` => "" // (1L << 57) case `notPRIVATE` => "" // (1L << 58) - // case `notABSTRACT` => "" // (1L << 59) - // case `notDEFERRED` => "" // (1L << 60) - // case `notFINAL` => "" // (1L << 61) - // case `notMETHOD` => "" // (1L << 62) + case 0x800000000000000L => "" // (1L << 59) + case 0x1000000000000000L => "" // (1L << 60) + case 0x2000000000000000L => "" // (1L << 61) + case 0x4000000000000000L => "" // (1L << 62) case 0x8000000000000000L => "" // (1L << 63) case _ => "" } diff --git a/test/files/buildmanager/t2562/t2562.check b/test/files/buildmanager/t2562/t2562.check index 390bbb9986..74575f28ea 100644 --- a/test/files/buildmanager/t2562/t2562.check +++ b/test/files/buildmanager/t2562/t2562.check @@ -3,10 +3,10 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ]] +Changes: Map(object A -> List(Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ])) +invalidate B.scala because it references changed definition [Changed(Definition(A.x3))[method x3 changed from ()Int to ()String flags: ]] compiling Set(B.scala) -Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ])) -invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ]] +Changes: Map(object B -> List(Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ])) +invalidate A.scala because it references changed definition [Changed(Definition(B.x2))[method x2 changed from ()Int to ()String flags: ]] compiling Set(A.scala, B.scala) -Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed 
from ()Int to ()String flags: ], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: ], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: ]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: ], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags: ])) +Changes: Map(object A -> List(Changed(Definition(A.x0))[method x0 changed from ()Int to ()String flags: ], Changed(Definition(A.x1))[method x1 changed from ()Int to ()String flags: ], Changed(Definition(A.x2))[method x2 changed from ()Int to ()String flags: ]), object B -> List(Changed(Definition(B.x0))[method x0 changed from ()Int to ()String flags: ], Changed(Definition(B.x1))[method x1 changed from ()Int to ()String flags: ])) diff --git a/test/files/buildmanager/t2649/t2649.check b/test/files/buildmanager/t2649/t2649.check index 390f284fd3..d0f41f32ec 100644 --- a/test/files/buildmanager/t2649/t2649.check +++ b/test/files/buildmanager/t2649/t2649.check @@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ]] +Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ])) +invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (zz: Int, yy: Int)Int to (yy: Int, zz: Int)Int flags: ]] compiling Set(B.scala) Changes: Map(object B -> List()) diff --git a/test/files/buildmanager/t2651_4/t2651_4.check b/test/files/buildmanager/t2651_4/t2651_4.check index c4ce382b5f..b182f31c09 100644 --- a/test/files/buildmanager/t2651_4/t2651_4.check +++ b/test/files/buildmanager/t2651_4/t2651_4.check @@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: ], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: ], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: ]] +Changes: Map(trait A -> List(Changed(Definition(A.x))[method x changed from ()T to ()T flags: ], Changed(Definition(A.y))[method y changed from (a: T)Unit to (a: T)Unit flags: ], Changed(Definition(A.z))[method z changed from [B <: T]()Unit to [B <: T]()Unit flags: ])) +invalidate B.scala because inherited method changed [Changed(Definition(A.x))[method x changed from ()T to ()T flags: ]] compiling Set(B.scala) B.scala:2: error: type mismatch; found : Int(3) diff --git a/test/files/buildmanager/t2652/t2652.check b/test/files/buildmanager/t2652/t2652.check index f517f9e95a..071281c6ff 100644 --- a/test/files/buildmanager/t2652/t2652.check +++ b/test/files/buildmanager/t2652/t2652.check @@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), 
Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ]] +Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ])) +invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ]] compiling Set(B.scala) Changes: Map(object B -> List()) diff --git a/test/files/buildmanager/t2653/t2653.check b/test/files/buildmanager/t2653/t2653.check index 6a4fc0e982..36781522af 100644 --- a/test/files/buildmanager/t2653/t2653.check +++ b/test/files/buildmanager/t2653/t2653.check @@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.))[constructor A changed from ()A[T] to ()A[T] flags: ])) +Changes: Map(class A -> List(Changed(Class(A))[ tparams: List((type T,type T))], Changed(Definition(A.))[constructor A changed from ()A[T] to ()A[T] flags: ])) invalidate B.scala because it references changed class [Changed(Class(A))[ tparams: List((type T,type T))]] compiling Set(B.scala) B.scala:2: error: type mismatch; diff --git a/test/files/buildmanager/t2655/t2655.check b/test/files/buildmanager/t2655/t2655.check index c473e9fd6e..41ce65a2f5 100644 --- a/test/files/buildmanager/t2655/t2655.check +++ b/test/files/buildmanager/t2655/t2655.check @@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ])) -invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ]] +Changes: Map(object A -> List(Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ])) +invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from (i: Function0)Unit to (i: Function0)Unit flags: ]] compiling Set(B.scala) B.scala:2: error: type mismatch; found : String("3") diff --git a/test/files/buildmanager/t2657/t2657.check b/test/files/buildmanager/t2657/t2657.check index 3fd0e0666d..74ba87a21d 100644 --- a/test/files/buildmanager/t2657/t2657.check +++ b/test/files/buildmanager/t2657/t2657.check @@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ])) -invalidate B.scala because inherited method changed [Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ]] +Changes: Map(class A -> List(Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ])) +invalidate B.scala because inherited method changed 
[Changed(Definition(A.y))[method y changed from (i: Int)String to (i: Int)String flags: implicit ]] compiling Set(B.scala) B.scala:2: error: type mismatch; found : Int(3) diff --git a/test/files/buildmanager/t2789/t2789.check b/test/files/buildmanager/t2789/t2789.check index 78c5119355..a7c767cc45 100644 --- a/test/files/buildmanager/t2789/t2789.check +++ b/test/files/buildmanager/t2789/t2789.check @@ -3,8 +3,8 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]), class E -> List()) -invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]] +Changes: Map(class A -> List(Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]), class E -> List()) +invalidate B.scala because inherited method changed [Changed(Definition(A.e))[method e changed from ()E to ()E flags: implicit ]] compiling Set(B.scala) B.scala:2: error: could not find implicit value for parameter y: E val y = x(3) -- cgit v1.2.3 From 86946630e9a1240fb9a378b2ec62e78b521f4320 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Fri, 3 Feb 2012 15:05:56 +0100 Subject: Fix some issues in parallel Ctrie. This change resolves some issues with ParCtrie splitters and their `remaining` method, which currently evaluates the size of the Ctrie. Since this is still not done lazily, nor in parallel, it has a certain cost, which is unacceptable. Change #1: The `shouldSplitFurther` method is by default implemented by calling the `remaining` method. This method now forwards the call to the same method in the splitter which is by default implemented in the same way as before, but can be overridden by custom collections such as the ParCtrie. Change #2: ParCtrie splitter now has a `level` member which just counts how many times the method has been split. This information is used to override the default `shouldSplitFurther` implementation. Change #3: The tasks and splitters rely heavily on the `remaining` method in the splitter for most operations. There is an additional method called `isRemainingCheap` which returns true by default, but can be overridden by custom collections such as the `Ctrie`. --- src/library/scala/collection/mutable/Ctrie.scala | 24 +++++++++++++++---- .../collection/parallel/ParIterableLike.scala | 27 ++-------------------- .../collection/parallel/RemainsIterator.scala | 27 ++++++++++++++-------- .../collection/parallel/mutable/ParCtrie.scala | 20 +++++++++++----- test/benchmarking/ParCtrie-map.scala | 21 +++++++++++++++++ test/benchmarking/TreeSetInsert.scala | 2 ++ 6 files changed, 75 insertions(+), 46 deletions(-) create mode 100644 test/benchmarking/ParCtrie-map.scala diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index e1a72d9511..6ed3a516c4 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -13,6 +13,7 @@ package mutable import java.util.concurrent.atomic._ import collection.immutable.{ ListMap => ImmutableListMap } +import collection.parallel.mutable.ParCtrie import generic._ import annotation.tailrec import annotation.switch @@ -578,6 +579,8 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai * iterator and clear operations. 
The cost of evaluating the (lazy) snapshot is * distributed across subsequent updates, thus making snapshot evaluation horizontally scalable. * + * For details, see: http://lampwww.epfl.ch/~prokopec/ctries-snapshot.pdf + * * @author Aleksandar Prokopec * @since 2.10 */ @@ -585,6 +588,7 @@ private[mutable] case class RDCSS_Descriptor[K, V](old: INode[K, V], expectedmai final class Ctrie[K, V] private (r: AnyRef, rtupd: AtomicReferenceFieldUpdater[Ctrie[K, V], AnyRef]) extends ConcurrentMap[K, V] with MapLike[K, V, Ctrie[K, V]] + with CustomParallelizable[(K, V), ParCtrie[K, V]] with Serializable { import Ctrie.computeHash @@ -710,6 +714,10 @@ extends ConcurrentMap[K, V] /* public methods */ + override def seq = this + + override def par = new ParCtrie(this) + override def empty: Ctrie[K, V] = new Ctrie[K, V] @inline final def isReadOnly = rootupdater eq null @@ -820,7 +828,7 @@ extends ConcurrentMap[K, V] def iterator: Iterator[(K, V)] = if (nonReadOnly) readOnlySnapshot().iterator - else new CtrieIterator(this) + else new CtrieIterator(0, this) override def stringPrefix = "Ctrie" @@ -844,7 +852,7 @@ object Ctrie extends MutableMapFactory[Ctrie] { } -private[collection] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { +private[collection] class CtrieIterator[K, V](var level: Int, ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { var stack = new Array[Array[BasicNode]](7) var stackpos = new Array[Int](7) var depth = -1 @@ -910,7 +918,7 @@ private[collection] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean } } else current = null - protected def newIterator(_ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_ct, _mustInit) + protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit) /** Returns a sequence of iterators over subsets of this iterator. * It's used to ease the implementation of splitters for a parallel version of the Ctrie. @@ -920,8 +928,12 @@ private[collection] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean val it = subiter subiter = null advance() + this.level += 1 Seq(it, this) - } else if (depth == -1) Seq(this) else { + } else if (depth == -1) { + this.level += 1 + Seq(this) + } else { var d = 0 while (d <= depth) { val rem = stack(d).length - 1 - stackpos(d) @@ -929,15 +941,17 @@ private[collection] class CtrieIterator[K, V](ct: Ctrie[K, V], mustInit: Boolean val (arr1, arr2) = stack(d).drop(stackpos(d) + 1).splitAt(rem / 2) stack(d) = arr1 stackpos(d) = -1 - val it = newIterator(ct, false) + val it = newIterator(level + 1, ct, false) it.stack(0) = arr2 it.stackpos(0) = -1 it.depth = 0 it.advance() // <-- fix it + this.level += 1 return Seq(this, it) } d += 1 } + this.level += 1 Seq(this) } diff --git a/src/library/scala/collection/parallel/ParIterableLike.scala b/src/library/scala/collection/parallel/ParIterableLike.scala index 32e0e8a8ed..7c5a835e56 100644 --- a/src/library/scala/collection/parallel/ParIterableLike.scala +++ b/src/library/scala/collection/parallel/ParIterableLike.scala @@ -96,17 +96,6 @@ import annotation.unchecked.uncheckedVariance * The combination of methods `toMap`, `toSeq` or `toSet` along with `par` and `seq` is a flexible * way to change between different collection types. 
* - * The method: - * - * {{{ - * def threshold(sz: Int, p: Int): Int - * }}} - * - * provides an estimate on the minimum number of elements the collection has before - * the splitting stops and depends on the number of elements in the collection. A rule of the - * thumb is the number of elements divided by 8 times the parallelism level. This method may - * be overridden in concrete implementations if necessary. - * * Since this trait extends the `Iterable` trait, methods like `size` must also * be implemented in concrete collections, while `iterator` forwards to `splitter` by * default. @@ -206,18 +195,6 @@ self: ParIterableLike[T, Repr, Sequential] => */ def isStrictSplitterCollection = true - /** Some minimal number of elements after which this collection should be handled - * sequentially by different processors. - * - * This method depends on the size of the collection and the parallelism level, which - * are both specified as arguments. - * - * @param sz the size based on which to compute the threshold - * @param p the parallelism level based on which to compute the threshold - * @return the maximum number of elements for performing operations sequentially - */ - def threshold(sz: Int, p: Int): Int = thresholdFromSize(sz, p) - /** The `newBuilder` operation returns a parallel builder assigned to this collection's fork/join pool. * This method forwards the call to `newCombiner`. */ @@ -833,7 +810,7 @@ self: ParIterableLike[T, Repr, Sequential] => extends StrictSplitterCheckTask[R, Tp] { protected[this] val pit: IterableSplitter[T] protected[this] def newSubtask(p: IterableSplitter[T]): Accessor[R, Tp] - def shouldSplitFurther = pit.remaining > threshold(size, parallelismLevel) + def shouldSplitFurther = pit.shouldSplitFurther(self.repr, parallelismLevel) def split = pit.splitWithSignalling.map(newSubtask(_)) // default split procedure private[parallel] override def signalAbort = pit.abort override def toString = this.getClass.getSimpleName + "(" + pit.toString + ")(" + result + ")(supername: " + super.toString + ")" @@ -1362,7 +1339,7 @@ self: ParIterableLike[T, Repr, Sequential] => /* scan tree */ - protected[this] def scanBlockSize = (threshold(size, parallelismLevel) / 2) max 1 + protected[this] def scanBlockSize = (thresholdFromSize(size, parallelismLevel) / 2) max 1 protected[this] trait ScanTree[U >: T] { def beginsAt: Int diff --git a/src/library/scala/collection/parallel/RemainsIterator.scala b/src/library/scala/collection/parallel/RemainsIterator.scala index e8b332da89..8ed4583419 100644 --- a/src/library/scala/collection/parallel/RemainsIterator.scala +++ b/src/library/scala/collection/parallel/RemainsIterator.scala @@ -28,6 +28,11 @@ private[collection] trait RemainsIterator[+T] extends Iterator[T] { * This method doesn't change the state of the iterator. */ def remaining: Int + + /** For most collections, this is a cheap operation. + * Exceptions can override this method. 
+ */ + def isRemainingCheap = true } @@ -112,7 +117,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def map2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = pbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) while (hasNext) cb += f(next) cb } @@ -137,7 +142,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def copy2builder[U >: T, Coll, Bld <: Builder[U, Coll]](b: Bld): Bld = { - b.sizeHint(remaining) + if (isRemainingCheap) b.sizeHint(remaining) while (hasNext) b += next b } @@ -179,7 +184,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def drop2combiner[U >: T, This](n: Int, cb: Combiner[U, This]): Combiner[U, This] = { drop(n) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) while (hasNext) cb += next cb } @@ -197,7 +202,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ def splitAt2combiners[U >: T, This](at: Int, before: Combiner[U, This], after: Combiner[U, This]) = { before.sizeHint(at) - after.sizeHint(remaining - at) + if (isRemainingCheap) after.sizeHint(remaining - at) var left = at while (left > 0) { before += next @@ -223,7 +228,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ val curr = next if (p(curr)) before += curr else { - after.sizeHint(remaining + 1) + if (isRemainingCheap) after.sizeHint(remaining + 1) after += curr isBefore = false } @@ -263,7 +268,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def zip2combiner[U >: T, S, That](otherpit: RemainsIterator[S], cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - cb.sizeHint(remaining min otherpit.remaining) + if (isRemainingCheap && otherpit.isRemainingCheap) cb.sizeHint(remaining min otherpit.remaining) while (hasNext && otherpit.hasNext) { cb += ((next, otherpit.next)) } @@ -271,7 +276,7 @@ private[collection] trait AugmentedIterableIterator[+T] extends RemainsIterator[ } def zipAll2combiner[U >: T, S, That](that: RemainsIterator[S], thiselem: U, thatelem: S, cb: Combiner[(U, S), That]): Combiner[(U, S), That] = { - cb.sizeHint(remaining max that.remaining) + if (isRemainingCheap && that.isRemainingCheap) cb.sizeHint(remaining max that.remaining) while (this.hasNext && that.hasNext) cb += ((this.next, that.next)) while (this.hasNext) cb += ((this.next, thatelem)) while (that.hasNext) cb += ((thiselem, that.next)) @@ -330,7 +335,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter /* transformers */ def reverse2combiner[U >: T, This](cb: Combiner[U, This]): Combiner[U, This] = { - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[T]() while (hasNext) lst ::= next while (lst != Nil) { @@ -342,7 +347,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def reverseMap2combiner[S, That](f: T => S, cb: Combiner[S, That]): Combiner[S, That] = { //val cb = cbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var lst = List[S]() while (hasNext) lst ::= f(next) while (lst != Nil) { @@ -354,7 +359,7 @@ private[collection] trait AugmentedSeqIterator[+T] extends AugmentedIterableIter def updated2combiner[U >: T, That](index: Int, elem: U, cb: Combiner[U, That]): Combiner[U, That] = { //val cb = cbf(repr) - cb.sizeHint(remaining) + if (isRemainingCheap) cb.sizeHint(remaining) var j = 0 
while (hasNext) { if (j == index) { @@ -395,6 +400,8 @@ self => pits } + def shouldSplitFurther[S](coll: ParIterable[S], parallelismLevel: Int) = remaining > thresholdFromSize(coll.size, parallelismLevel) + /** The number of elements this iterator has yet to traverse. This method * doesn't change the state of the iterator. * diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala index d8c060e719..86624500fd 100644 --- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -27,7 +27,7 @@ import scala.collection.mutable.CtrieIterator * @author Aleksandar Prokopec * @since 2.10 */ -final class ParCtrie[K, V] private[mutable] (private val ctrie: Ctrie[K, V]) +final class ParCtrie[K, V] private[collection] (private val ctrie: Ctrie[K, V]) extends ParMap[K, V] with GenericParMapTemplate[K, V, ParCtrie] with ParMapLike[K, V, ParCtrie[K, V], Ctrie[K, V]] @@ -45,7 +45,7 @@ extends ParMap[K, V] override def seq = ctrie - def splitter = new ParCtrieSplitter(ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true) + def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true) override def size = ctrie.size @@ -76,15 +76,21 @@ extends ParMap[K, V] } -private[collection] class ParCtrieSplitter[K, V](ct: Ctrie[K, V], mustInit: Boolean) -extends CtrieIterator[K, V](ct, mustInit) +private[collection] class ParCtrieSplitter[K, V](lev: Int, ct: Ctrie[K, V], mustInit: Boolean) +extends CtrieIterator[K, V](lev, ct, mustInit) with IterableSplitter[(K, V)] { // only evaluated if `remaining` is invoked (which is not used by most tasks) - lazy val totalsize = ct.iterator.size // TODO improve to lazily compute sizes + //lazy val totalsize = ct.iterator.size /* TODO improve to lazily compute sizes */ + def totalsize: Int = throw new UnsupportedOperationException var iterated = 0 - protected override def newIterator(_ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_ct, _mustInit) + protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit) + + override def shouldSplitFurther[S](coll: collection.parallel.ParIterable[S], parallelismLevel: Int) = { + val maxsplits = 3 + Integer.highestOneBit(parallelismLevel) + level < maxsplits + } def dup = null // TODO necessary for views @@ -95,6 +101,8 @@ extends CtrieIterator[K, V](ct, mustInit) def split: Seq[IterableSplitter[(K, V)]] = subdivide().asInstanceOf[Seq[IterableSplitter[(K, V)]]] + override def isRemainingCheap = false + def remaining: Int = totalsize - iterated } diff --git a/test/benchmarking/ParCtrie-map.scala b/test/benchmarking/ParCtrie-map.scala new file mode 100644 index 0000000000..c8de99f33e --- /dev/null +++ b/test/benchmarking/ParCtrie-map.scala @@ -0,0 +1,21 @@ + + + +import collection.parallel.mutable.ParCtrie + + + +object Map extends testing.Benchmark { + val length = sys.props("length").toInt + val par = sys.props("par").toInt + val parctrie = ParCtrie((0 until length) zip (0 until length): _*) + + collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par) + + def run = { + parctrie map { + kv => kv + } + } +} + diff --git a/test/benchmarking/TreeSetInsert.scala b/test/benchmarking/TreeSetInsert.scala index 9ede8aedc5..23444aa305 100644 --- a/test/benchmarking/TreeSetInsert.scala +++ b/test/benchmarking/TreeSetInsert.scala @@ -33,6 +33,7 @@ object 
JavaUtilTS extends testing.Benchmark { } } + object MutableTS extends testing.Benchmark { val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray @@ -50,6 +51,7 @@ object MutableTS extends testing.Benchmark { } } + object ImmutableTS extends testing.Benchmark { val length = sys.props("length").toInt var data: Array[Dummy] = (0 until length) map { a => new Dummy(a) } toArray -- cgit v1.2.3 From 58ab20ba9b6a17c81f2f89f31f7265d2ed3b166b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 3 Feb 2012 07:04:14 -0800 Subject: Restored build.number. As it is apparently used by maven. We're now at a local minimum of sensibility for the construction of build strings, but the good thing about local minima is that one can reasonably anticipate an upward climb. Or at least flatlining. --- build.number | 5 +++++ 1 file changed, 5 insertions(+) create mode 100644 build.number diff --git a/build.number b/build.number new file mode 100644 index 0000000000..91c7e72c85 --- /dev/null +++ b/build.number @@ -0,0 +1,5 @@ +#Tue Sep 11 19:21:09 CEST 2007 +version.minor=10 +version.patch=0 +version.suffix=alpha +version.major=2 -- cgit v1.2.3 From a015c08fda8b8556345a802d60557a3ecd627ccc Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Fri, 3 Feb 2012 16:58:54 +0100 Subject: Add tests for parallel Ctrie. Changed parameters in some tests to speed them up. --- .../scala/collection/parallel/Combiner.scala | 1 - test/files/jvm/serialization.check | 4 + test/files/jvm/serialization.scala | 5 ++ test/files/run/ctries/iterator.scala | 14 ++-- test/files/scalacheck/avl.scala | 18 ++-- .../parallel-collections/ParallelCtrieCheck.scala | 98 ++++++++++++++++++++++ .../files/scalacheck/parallel-collections/pc.scala | 3 + 7 files changed, 126 insertions(+), 17 deletions(-) create mode 100644 test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala diff --git a/src/library/scala/collection/parallel/Combiner.scala b/src/library/scala/collection/parallel/Combiner.scala index a2cab7eb5d..e304be92ae 100644 --- a/src/library/scala/collection/parallel/Combiner.scala +++ b/src/library/scala/collection/parallel/Combiner.scala @@ -34,7 +34,6 @@ import scala.collection.generic.Sizing */ trait Combiner[-Elem, +To] extends Builder[Elem, To] with Sizing with Parallel { //self: EnvironmentPassingCombiner[Elem, To] => - private[collection] final val tasksupport = getTaskSupport /** Combines the contents of the receiver builder and the `other` builder, * producing a new builder containing both their elements. 
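The serialization hunks that follow exercise the new ParCtrie through `read(write(...))`, i.e. a plain Java serialization round trip. For readers reproducing the check outside the test harness, a minimal self-contained sketch (helper names are illustrative, not the test's own):

    // Illustrative sketch only -- not part of the patch.
    import java.io._

    def roundTrip[T <: AnyRef](x: T): T = {
      val bytes = new ByteArrayOutputStream()
      val out = new ObjectOutputStream(bytes)
      out.writeObject(x)
      out.close()
      val in = new ObjectInputStream(new ByteArrayInputStream(bytes.toByteArray))
      try in.readObject().asInstanceOf[T] finally in.close()
    }

    // e.g. roundTrip(collection.parallel.mutable.ParCtrie(1 -> 2, 2 -> 4)) should equal the original
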
diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index cdfc100e0d..67b77639a2 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -287,6 +287,10 @@ x = ParHashMap(1 -> 2, 2 -> 4) y = ParHashMap(1 -> 2, 2 -> 4) x equals y: true, y equals x: true +x = ParCtrie(1 -> 2, 2 -> 4) +y = ParCtrie(1 -> 2, 2 -> 4) +x equals y: true, y equals x: true + x = ParHashSet(1, 2, 3) y = ParHashSet(1, 2, 3) x equals y: true, y equals x: true diff --git a/test/files/jvm/serialization.scala b/test/files/jvm/serialization.scala index 4e1ff368ab..75daa8903d 100644 --- a/test/files/jvm/serialization.scala +++ b/test/files/jvm/serialization.scala @@ -613,6 +613,11 @@ object Test9_parallel { val _mpm: mutable.ParHashMap[Int, Int] = read(write(mpm)) check(mpm, _mpm) + // mutable.ParCtrie + val mpc = mutable.ParCtrie(1 -> 2, 2 -> 4) + val _mpc: mutable.ParCtrie[Int, Int] = read(write(mpc)) + check(mpc, _mpc) + // mutable.ParHashSet val mps = mutable.ParHashSet(1, 2, 3) val _mps: mutable.ParHashSet[Int] = read(write(mps)) diff --git a/test/files/run/ctries/iterator.scala b/test/files/run/ctries/iterator.scala index 1cef4f66ea..4bbf9009f0 100644 --- a/test/files/run/ctries/iterator.scala +++ b/test/files/run/ctries/iterator.scala @@ -141,8 +141,8 @@ object IteratorSpec extends Spec { "be consistent when taken with concurrent modifications" in { val sz = 25000 - val W = 25 - val S = 10 + val W = 15 + val S = 5 val checks = 5 val ct = new Ctrie[Wrap, Int] for (i <- 0 until sz) ct.put(new Wrap(i), i) @@ -182,8 +182,8 @@ object IteratorSpec extends Spec { "be consistent with a concurrent removal with a well defined order" in { val sz = 150000 - val sgroupsize = 40 - val sgroupnum = 20 + val sgroupsize = 10 + val sgroupnum = 5 val removerslowdown = 50 val ct = new Ctrie[Wrap, Int] for (i <- 0 until sz) ct.put(new Wrap(i), i) @@ -201,7 +201,7 @@ object IteratorSpec extends Spec { def consistentIteration(it: Iterator[(Wrap, Int)]) = { class Iter extends Thread { override def run() { - val elems = it.toSeq + val elems = it.toBuffer if (elems.nonEmpty) { val minelem = elems.minBy((x: (Wrap, Int)) => x._1.i)._1.i assert(elems.forall(_._1.i >= minelem)) @@ -224,8 +224,8 @@ object IteratorSpec extends Spec { "be consistent with a concurrent insertion with a well defined order" in { val sz = 150000 - val sgroupsize = 30 - val sgroupnum = 30 + val sgroupsize = 10 + val sgroupnum = 10 val inserterslowdown = 50 val ct = new Ctrie[Wrap, Int] diff --git a/test/files/scalacheck/avl.scala b/test/files/scalacheck/avl.scala index 51fb1fe8c3..af79ad49e3 100644 --- a/test/files/scalacheck/avl.scala +++ b/test/files/scalacheck/avl.scala @@ -47,21 +47,21 @@ package scala.collection.mutable { } } - def genInput: Gen[(Int, List[AVLTree[Int]])] = for { - size <- Gen.choose(20, 25) - elements <- Gen.listOfN(size, Gen.choose(0, 1000)) - selected <- Gen.choose(0, 1000) + def genInput: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for { + size <- org.scalacheck.Gen.choose(20, 25) + elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000)) + selected <- org.scalacheck.Gen.choose(0, 1000) } yield { // selected mustn't be in elements already val list = makeAllBalancedTree(elements.sorted.distinct.map(_*2)) (selected*2+1, list) } - def genInputDelete: Gen[(Int, List[AVLTree[Int]])] = for { - size <- Gen.choose(20, 25) - elements <- Gen.listOfN(size, Gen.choose(0, 1000)) + def genInputDelete: org.scalacheck.Gen[(Int, List[AVLTree[Int]])] = for { + size <- 
org.scalacheck.Gen.choose(20, 25) + elements <- org.scalacheck.Gen.listOfN(size, org.scalacheck.Gen.choose(0, 1000)) e = elements.sorted.distinct - selected <- Gen.choose(0, e.size-1) + selected <- org.scalacheck.Gen.choose(0, e.size-1) } yield { // selected must be in elements already val list = makeAllBalancedTree(e) @@ -111,4 +111,4 @@ package scala.collection.mutable { object Test extends Properties("AVL") { include(scala.collection.mutable.TestInsert) include(scala.collection.mutable.TestRemove) -} \ No newline at end of file +} diff --git a/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala new file mode 100644 index 0000000000..d1924f0ada --- /dev/null +++ b/test/files/scalacheck/parallel-collections/ParallelCtrieCheck.scala @@ -0,0 +1,98 @@ +package scala.collection.parallel +package mutable + + + +import org.scalacheck._ +import org.scalacheck.Gen +import org.scalacheck.Gen._ +import org.scalacheck.Prop._ +import org.scalacheck.Properties +import org.scalacheck.Arbitrary._ + +import scala.collection._ +import scala.collection.parallel.ops._ + + + +abstract class ParallelCtrieCheck[K, V](tp: String) extends ParallelMapCheck[K, V]("mutable.ParCtrie[" + tp + "]") { + // ForkJoinTasks.defaultForkJoinPool.setMaximumPoolSize(Runtime.getRuntime.availableProcessors * 2) + // ForkJoinTasks.defaultForkJoinPool.setParallelism(Runtime.getRuntime.availableProcessors * 2) + + type CollType = ParCtrie[K, V] + + def isCheckingViews = false + + def hasStrictOrder = false + + def ofSize(vals: Seq[Gen[(K, V)]], sz: Int) = { + val ct = new mutable.Ctrie[K, V] + val gen = vals(rnd.nextInt(vals.size)) + for (i <- 0 until sz) ct += sample(gen) + ct + } + + def fromTraversable(t: Traversable[(K, V)]) = { + val pct = new ParCtrie[K, V] + var i = 0 + for (kv <- t.toList) { + pct += kv + i += 1 + } + pct + } + +} + + +object IntIntParallelCtrieCheck extends ParallelCtrieCheck[Int, Int]("Int, Int") +with PairOperators[Int, Int] +with PairValues[Int, Int] +{ + def intvalues = new IntValues {} + def kvalues = intvalues.values + def vvalues = intvalues.values + + val intoperators = new IntOperators {} + def voperators = intoperators + def koperators = intoperators + + override def printDataStructureDebugInfo(ds: AnyRef) = ds match { + case pm: ParCtrie[k, v] => + println("Mutable parallel ctrie") + case _ => + println("could not match data structure type: " + ds.getClass) + } + + override def checkDataStructureInvariants(orig: Traversable[(Int, Int)], ds: AnyRef) = ds match { + // case pm: ParHashMap[k, v] if 1 == 0 => // disabled this to make tests faster + // val invs = pm.brokenInvariants + + // val containsall = (for ((k, v) <- orig) yield { + // if (pm.asInstanceOf[ParHashMap[Int, Int]].get(k) == Some(v)) true + // else { + // println("Does not contain original element: " + (k, v)) + // false + // } + // }).foldLeft(true)(_ && _) + + + // if (invs.isEmpty) containsall + // else { + // println("Invariants broken:\n" + invs.mkString("\n")) + // false + // } + case _ => true + } + +} + + + + + + + + + + diff --git a/test/files/scalacheck/parallel-collections/pc.scala b/test/files/scalacheck/parallel-collections/pc.scala index cc0382303a..8a0dba3c25 100644 --- a/test/files/scalacheck/parallel-collections/pc.scala +++ b/test/files/scalacheck/parallel-collections/pc.scala @@ -25,6 +25,9 @@ class ParCollProperties extends Properties("Parallel collections") { // parallel mutable hash maps (tables) 
include(mutable.IntIntParallelHashMapCheck) + // parallel ctrie + include(mutable.IntIntParallelCtrieCheck) + // parallel mutable hash sets (tables) include(mutable.IntParallelHashSetCheck) -- cgit v1.2.3 From 4bbfed3f8fda69624ea6a96de52ba973f55900cf Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 3 Feb 2012 10:08:27 -0800 Subject: Updated and added some runners. Easier ways to invoke scala/scalac based on starr, locker, or quick. % tools/starr_scalac -version Scala compiler version 2.10.0.r26060-b20111123092602 -- Copyright 2002-2011, LAMP/EPFL % tools/locker_scalac -version Scala compiler version v2.10.0-M1-0140-g4619a48c1e-2012-02-02 -- Copyright 2002-2011, LAMP/EPFL % tools/quick_scalac -version Scala compiler version v2.10.0-M1-0144-g0c59a25a81-2012-02-02 -- Copyright 2002-2011, LAMP/EPFL --- tools/locker_scala | 8 +++----- tools/locker_scalac | 8 +++----- tools/quick_scala | 6 ++++++ tools/quick_scalac | 6 ++++++ tools/starr_scala | 6 ++++++ tools/starr_scalac | 6 ++++++ 6 files changed, 30 insertions(+), 10 deletions(-) create mode 100755 tools/quick_scala create mode 100755 tools/quick_scalac create mode 100755 tools/starr_scala create mode 100755 tools/starr_scalac diff --git a/tools/locker_scala b/tools/locker_scala index 4434c94bf3..02d2efcdd8 100755 --- a/tools/locker_scala +++ b/tools/locker_scala @@ -1,8 +1,6 @@ -#!/bin/bash +#!/bin/sh # -THISDIR=`dirname $0` -CP=`$THISDIR/lockercp` -CLASS="scala.tools.nsc.MainGenericRunner" +CP=$($(dirname $BASH_SOURCE)/lockercp) -java -classpath "$CP" $CLASS -usejavacp "$@" +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@" diff --git a/tools/locker_scalac b/tools/locker_scalac index 2ad153e929..c4b28b7bc0 100755 --- a/tools/locker_scalac +++ b/tools/locker_scalac @@ -1,8 +1,6 @@ -#!/bin/bash +#!/bin/sh # -THISDIR=`dirname $0` -CP=`$THISDIR/lockercp` -CLASS="scala.tools.nsc.Main" +CP=$($(dirname $BASH_SOURCE)/lockercp) -java -classpath "$CP" $CLASS -usejavacp "$@" +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@" diff --git a/tools/quick_scala b/tools/quick_scala new file mode 100755 index 0000000000..16938ddba4 --- /dev/null +++ b/tools/quick_scala @@ -0,0 +1,6 @@ +#!/bin/sh +# + +CP=$($(dirname $BASH_SOURCE)/quickcp) + +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@" diff --git a/tools/quick_scalac b/tools/quick_scalac new file mode 100755 index 0000000000..1b9a036c18 --- /dev/null +++ b/tools/quick_scalac @@ -0,0 +1,6 @@ +#!/bin/sh +# + +CP=$($(dirname $BASH_SOURCE)/quickcp) + +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@" diff --git a/tools/starr_scala b/tools/starr_scala new file mode 100755 index 0000000000..9b0fb60cf7 --- /dev/null +++ b/tools/starr_scala @@ -0,0 +1,6 @@ +#!/bin/sh +# + +CP=$($(dirname $BASH_SOURCE)/starrcp) + +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.MainGenericRunner -usejavacp "$@" diff --git a/tools/starr_scalac b/tools/starr_scalac new file mode 100755 index 0000000000..972eeaff2b --- /dev/null +++ b/tools/starr_scalac @@ -0,0 +1,6 @@ +#!/bin/sh +# + +CP=$($(dirname $BASH_SOURCE)/starrcp) + +java $JAVA_OPTS -classpath "$CP" scala.tools.nsc.Main -usejavacp "$@" -- cgit v1.2.3 From 2f3370524f2348ab2bdc0665f548ac8c5bf586c7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 3 Feb 2012 16:00:18 -0800 Subject: Injecting epfl-publish into merge-base. 
--- tools/epfl-publish | 32 ++++++++++++++++++++++++++++++++ 1 file changed, 32 insertions(+) create mode 100755 tools/epfl-publish diff --git a/tools/epfl-publish b/tools/epfl-publish new file mode 100755 index 0000000000..4982f930bb --- /dev/null +++ b/tools/epfl-publish @@ -0,0 +1,32 @@ +#!/usr/bin/env bash +# +# publishes nightly build if $publish_to is set in environment. +# + +[[ $# -eq 1 ]] || { + cat < + +Environment variables: + publish_to rsync destination +EOM + exit 0 +} +version="$1" + +[[ -d dists/archives ]] || { + echo "Can't find build, has it completed? No directory at dists/archives" + exit 1 +} + +if [[ -z $publish_to ]]; then + echo "Nothing to publish." +else + echo "Publishing nightly build to $publish_to" + # Archive Scala nightly distribution + rsync -az dists/archives/ "$publish_to/distributions" + # don't publish docs in 2.8.x + [[ $version == "2.8.x" ]] || rsync -az build/scaladoc/ "$publish_to/docs" + # sbaz + [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz" +fi -- cgit v1.2.3 From b16cbcdf2eeca3e5bcd0601df3524df6d307704b Mon Sep 17 00:00:00 2001 From: Vlad Ureche Date: Fri, 3 Feb 2012 22:59:55 +0100 Subject: Fix 2.9.2 scaladoc replacing nightlies in jenkins --- tools/epfl-publish | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tools/epfl-publish b/tools/epfl-publish index 4982f930bb..de5e17b13f 100755 --- a/tools/epfl-publish +++ b/tools/epfl-publish @@ -25,8 +25,8 @@ else echo "Publishing nightly build to $publish_to" # Archive Scala nightly distribution rsync -az dists/archives/ "$publish_to/distributions" - # don't publish docs in 2.8.x - [[ $version == "2.8.x" ]] || rsync -az build/scaladoc/ "$publish_to/docs" + # only publish scaladoc nightly for trunk + [[ $version == "master" ]] && rsync -az build/scaladoc/ "$publish_to/docs" # sbaz [[ -d dists/sbaz ]] && rsync -az dists/sbaz/ "$publish_to/sbaz" fi -- cgit v1.2.3 From af944bafa9751273e53114ce423e1e2c306c9286 Mon Sep 17 00:00:00 2001 From: Jason Zaugg Date: Sat, 4 Feb 2012 10:19:43 +0100 Subject: An IntelliJ module for the root directory. This allows any file to be be edited, rather than just the ones under library, compiler, actors, ... --- src/intellij/scala-lang.ipr.SAMPLE | 3 ++- src/intellij/scala.iml.SAMPLE | 10 ++++++++++ 2 files changed, 12 insertions(+), 1 deletion(-) create mode 100644 src/intellij/scala.iml.SAMPLE diff --git a/src/intellij/scala-lang.ipr.SAMPLE b/src/intellij/scala-lang.ipr.SAMPLE index 4d32f0e2e1..93b6285cfb 100644 --- a/src/intellij/scala-lang.ipr.SAMPLE +++ b/src/intellij/scala-lang.ipr.SAMPLE @@ -32,7 +32,7 @@ - + @@ -201,6 +201,7 @@ + diff --git a/src/intellij/scala.iml.SAMPLE b/src/intellij/scala.iml.SAMPLE new file mode 100644 index 0000000000..8ea9d0dd71 --- /dev/null +++ b/src/intellij/scala.iml.SAMPLE @@ -0,0 +1,10 @@ + + + + + + + + + + -- cgit v1.2.3 From 9415696be1595d59b9b6316b493fa02ad73d979c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 4 Feb 2012 21:27:15 -0800 Subject: Time-traveled get-scala-revision to 3-way merge base. --- tools/get-scala-revision | 44 ++++++++++++++++++++++++++++---------------- 1 file changed, 28 insertions(+), 16 deletions(-) diff --git a/tools/get-scala-revision b/tools/get-scala-revision index b27b6ddc82..8441b29070 100755 --- a/tools/get-scala-revision +++ b/tools/get-scala-revision @@ -2,23 +2,35 @@ # # Usage: get-scala-revision [dir] # Figures out current scala revision of a git clone. -# # If no dir is given, current working dir is used. 
+# +# Example build version string: +# v2.10.0-M1-0098-g6f1c486d0b-2012-02-01 +# + +[[ $# -eq 0 ]] || cd "$1" + +ensure_tag () { + # v2.10.0-M1 + fallback=58cb15c40d + [[ -n $(git tag -l v2.10.0-M1) ]] || { + git tag -a -m "generated by get-scala-revision" v2.10.0-M1 $fallback + } +} +ensure_tag -# not like releases come out so often that we are duty-bound -# to recalculate this every time. -# git merge-base v2.8.2 v2.9.1 master -devbase="df13e31bbb" +# the closest tag, obtained separately because we have to +# reconstruct the string around the padded distance. +tag=$(git describe --match 'v2*' --abbrev=0) -# reimplementing git describe hopefully in a way which works -# without any particular tags, branches, or recent versions of git. -# this is supposed to generate -# dev-NNNN-g -# where NNNN is the number of commits since devbase, which -# is the merge-base of the most recent release and master. -# Presently hardcoded to reduce uncertainty, v2.8.2/v2.9.1/master. -commits=$(git --no-pager log --pretty=oneline $devbase..HEAD | wc -l) -sha=$(git rev-list -n 1 HEAD) -datestr=$(date "+%Y-%m-%d") +# the full string - padding correctness depends on abbrev=10. +described=$(git describe --match 'v2*' --abbrev=10) +suffix="${described##${tag}-}" +counter=$(echo $suffix | cut -d - -f 1) +hash=$(echo $suffix | cut -d - -f 2) -printf "rdev-%s-%s-g%s\n" $commits $datestr ${sha:0:7} +# 016 is rocket-surgically-calibrated to pad the distance from the +# tag to the current commit into a 4-digit number - since maven +# will be treating this as a string, the ide depends on +# 10 being greater than 9 (thus 0010 and 00009.) +printf "%s-%04d-%10s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d") -- cgit v1.2.3 From 69c2eb5ad21848ab6b326c71c157288432e6a9f5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 4 Feb 2012 21:34:51 -0800 Subject: Establish more baseline tags. So we can turn out a build string regardless of local conditions. --- tools/get-scala-revision | 31 ++++++++++++++++++------------- 1 file changed, 18 insertions(+), 13 deletions(-) diff --git a/tools/get-scala-revision b/tools/get-scala-revision index 8441b29070..8747fdc3fb 100755 --- a/tools/get-scala-revision +++ b/tools/get-scala-revision @@ -11,26 +11,31 @@ [[ $# -eq 0 ]] || cd "$1" ensure_tag () { - # v2.10.0-M1 - fallback=58cb15c40d - [[ -n $(git tag -l v2.10.0-M1) ]] || { - git tag -a -m "generated by get-scala-revision" v2.10.0-M1 $fallback + sha=$1 + rev=$2 + + [[ -n $(git tag -l $rev) ]] || { + git tag -a -m "generated by get-scala-revision" $rev $sha } } -ensure_tag + +# Ensure some baseline tags are present so if this repository's +# tags are screwed up or stale, we should still have a reference +# point for a build string. +ensure_tag 58cb15c40d v2.10.0-M1 +ensure_tag 29f3eace1e v2.9.1 +ensure_tag b0d78f6b9c v2.8.2 # the closest tag, obtained separately because we have to # reconstruct the string around the padded distance. -tag=$(git describe --match 'v2*' --abbrev=0) +tag=$(git describe --tags --match 'v2*' --abbrev=0) -# the full string - padding correctness depends on abbrev=10. -described=$(git describe --match 'v2*' --abbrev=10) +# printf %016s is not portable for 0-padding, has to be a digit. +# so we're stuck disassembling it. 
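The string surgery that follows (closest tag, zero-padded commit count, abbreviated hash, date) is a little terse in shell, so here is the same assembly sketched in Scala, with illustrative names; the example input mirrors the one in the header comment above.

    // Illustrative sketch only -- not part of the patch.
    // buildString("v2.10.0-M1-98-g6f1c486d0b", "v2.10.0-M1", "2012-02-01")
    //   == "v2.10.0-M1-0098-g6f1c486d0b-2012-02-01"
    def buildString(described: String, tag: String, date: String): String = {
      val suffix  = described.stripPrefix(tag + "-")
      val counter = suffix.takeWhile(_ != '-').toInt    // commits since the closest tag
      val hash    = suffix.dropWhile(_ != '-').tail     // "g" + abbreviated sha (abbrev=10)
      "%s-%04d-%s-%s".format(tag, counter, hash, date)  // zero-pad so a string compare sorts builds correctly
    }
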
+described=$(git describe --tags --match 'v2*' --abbrev=10) suffix="${described##${tag}-}" counter=$(echo $suffix | cut -d - -f 1) hash=$(echo $suffix | cut -d - -f 2) -# 016 is rocket-surgically-calibrated to pad the distance from the -# tag to the current commit into a 4-digit number - since maven -# will be treating this as a string, the ide depends on -# 10 being greater than 9 (thus 0010 and 00009.) -printf "%s-%04d-%10s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d") +# v2.10.0-M1-0098-g6f1c486d0b-2012-02-01 +printf "%s-%04d-%s-%s\n" "$tag" "$counter" "$hash" $(date "+%Y-%m-%d") -- cgit v1.2.3 From 4e731759dd5d33781a71c67d19b84f4bb07242d5 Mon Sep 17 00:00:00 2001 From: Szabolcs Berecz Date: Sat, 28 Jan 2012 00:18:15 +0100 Subject: one less TODO and null check --- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 2 +- src/compiler/scala/tools/nsc/backend/icode/Members.scala | 3 +-- 2 files changed, 2 insertions(+), 3 deletions(-) diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 3baff7da9e..6aee52a354 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -133,7 +133,7 @@ abstract class GenICode extends SubComponent { if (!ctx1.bb.closed) ctx1.bb.close prune(ctx1.method) } else - ctx1.method.setCode(null) + ctx1.method.setCode(NoCode) ctx1 case Template(_, _, body) => diff --git a/src/compiler/scala/tools/nsc/backend/icode/Members.scala b/src/compiler/scala/tools/nsc/backend/icode/Members.scala index 2668e7f29f..298c9171a1 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/Members.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/Members.scala @@ -177,8 +177,7 @@ trait Members { /** method parameters */ var params: List[Local] = Nil - // TODO - see how null is stil arriving here - def hasCode = (code ne NoCode) && (code ne null) + def hasCode = code ne NoCode def setCode(code: Code): IMethod = { this.code = code; this -- cgit v1.2.3 From 671f463c30cfa12e6a9498efdf857c5adddd2465 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 5 Feb 2012 16:35:57 +0100 Subject: Fixes curlUpload for Cygwin --- tools/binary-repo-lib.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index 3a75593f21..f5d6043e37 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -7,6 +7,7 @@ remote_urlbase="http://typesafe.artifactoryonline.com/typesafe/scala-sha-bootstr libraryJar="$(pwd)/lib/scala-library.jar" desired_ext=".desired.sha1" push_jar="$(pwd)/tools/push.jar" +if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then push_jar="$(cygpath -m "$push_jar")"; fi # Cache dir has .sbt in it to line up with SBT build. 
cache_dir="${HOME}/.sbt/cache/scala" -- cgit v1.2.3 From a364a7f3c4e296442e5447e50e6c714fc59d2723 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 5 Feb 2012 16:36:53 +0100 Subject: Fixes OSTYPE check for Cygwin --- tools/binary-repo-lib.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/tools/binary-repo-lib.sh b/tools/binary-repo-lib.sh index f5d6043e37..4c5497e803 100755 --- a/tools/binary-repo-lib.sh +++ b/tools/binary-repo-lib.sh @@ -51,7 +51,7 @@ curlDownload() { checkCurl local jar=$1 local url=$2 - if [[ "$OSTYPE" == *Cygwin* ]]; then + if [[ "$OSTYPE" == *Cygwin* || "$OSTYPE" == *cygwin* ]]; then jar=$(cygpath -m $1) fi http_code=$(curl --write-out '%{http_code}' --silent --fail --output "$jar" "$url") -- cgit v1.2.3 From ffc2389840852a120fecd772206d55db9a79f30e Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 5 Feb 2012 16:49:18 +0100 Subject: Replaced LiftCode with a function in MacroContext Major cleanup of reification: * LiftCode phase has been removed * Code has been deprecated and will be removed as we roll a new starr * Logic related to type-directed lifting has been purged scala.reflect.macro.Context#reify now provides the same services as LiftCode provided (except that it returns Tree, not Code). For testing purposes, I've retained the oh-so-convenient automagic lift. test/files/codelib/code.jar now hosts Code.lift reimplemented in a macro, so that the tests can continue working as if nothing has happened. --- .../scala/reflect/internal/Definitions.scala | 3 - src/compiler/scala/reflect/internal/StdNames.scala | 10 +- src/compiler/scala/reflect/internal/Trees.scala | 9 - src/compiler/scala/tools/ant/Scalac.scala | 2 +- src/compiler/scala/tools/nsc/Global.scala | 14 +- src/compiler/scala/tools/nsc/ast/Reifiers.scala | 445 ++++++++++++++++ .../scala/tools/nsc/ast/ReifyPrinters.scala | 85 +++ .../scala/tools/nsc/settings/ScalaSettings.scala | 2 - .../scala/tools/nsc/transform/LiftCode.scala | 570 --------------------- .../scala/tools/nsc/typechecker/Implicits.scala | 2 +- .../scala/tools/nsc/typechecker/Infer.scala | 8 +- .../scala/tools/nsc/typechecker/Macros.scala | 7 +- .../scala/tools/nsc/typechecker/Typers.scala | 15 +- src/library/scala/reflect/Code.scala | 2 + src/library/scala/reflect/api/StandardNames.scala | 21 + src/library/scala/reflect/api/Trees.scala | 15 +- src/library/scala/reflect/api/Universe.scala | 3 +- src/library/scala/reflect/macro/Context.scala | 21 + src/partest/scala/tools/partest/PartestTask.scala | 12 +- .../scala/tools/partest/nest/CompileManager.scala | 2 + .../scala/tools/partest/nest/PathSettings.scala | 7 + .../tools/partest/nest/ReflectiveRunner.scala | 8 +- .../scala/tools/partest/nest/TestFile.scala | 4 + src/partest/scala/tools/partest/nest/Worker.scala | 4 +- .../scala/tools/partest/utils/CodeTest.scala | 41 -- test/files/codelib/code.jar.desired.sha1 | 1 + test/files/pos/t531.scala | 4 +- test/files/pos/t532.scala | 4 +- test/files/run/code.check | 36 -- test/files/run/code.scala | 60 --- test/files/run/programmatic-main.check | 37 +- test/files/run/reify_closure1.scala | 4 +- test/files/run/reify_closure2a.scala | 4 +- test/files/run/reify_closure3a.scala | 4 +- test/files/run/reify_closure4a.scala | 4 +- test/files/run/reify_closure5a.scala | 4 +- test/files/run/reify_closure6.scala | 4 +- test/files/run/reify_closure7.scala | 4 +- test/files/run/reify_this.scala | 2 +- test/files/run/t4875.check | 17 - test/files/run/t4875.scala | 12 - test/pending/run/reify_closure2b.scala | 4 +- 
test/pending/run/reify_closure3b.scala | 4 +- test/pending/run/reify_closure4b.scala | 4 +- test/pending/run/reify_closure5b.scala | 4 +- 45 files changed, 687 insertions(+), 842 deletions(-) create mode 100644 src/compiler/scala/tools/nsc/ast/Reifiers.scala create mode 100644 src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala delete mode 100644 src/compiler/scala/tools/nsc/transform/LiftCode.scala create mode 100644 src/library/scala/reflect/api/StandardNames.scala delete mode 100644 src/partest/scala/tools/partest/utils/CodeTest.scala create mode 100644 test/files/codelib/code.jar.desired.sha1 delete mode 100644 test/files/run/code.check delete mode 100644 test/files/run/code.scala delete mode 100644 test/files/run/t4875.check delete mode 100644 test/files/run/t4875.scala diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 8114be20d5..6871822562 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -402,9 +402,6 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val FullManifestModule = getRequiredModule("scala.reflect.Manifest") lazy val OptManifestClass = getRequiredClass("scala.reflect.OptManifest") lazy val NoManifest = getRequiredModule("scala.reflect.NoManifest") - lazy val CodeClass = getClass(sn.Code) - lazy val CodeModule = getModule(sn.Code) - lazy val Code_lift = getMember(CodeModule, nme.lift_) lazy val ScalaSignatureAnnotation = getRequiredClass("scala.reflect.ScalaSignature") lazy val ScalaLongSignatureAnnotation = getRequiredClass("scala.reflect.ScalaLongSignature") diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala index b1a24c0be2..045daa7eb1 100644 --- a/src/compiler/scala/reflect/internal/StdNames.scala +++ b/src/compiler/scala/reflect/internal/StdNames.scala @@ -271,8 +271,9 @@ trait StdNames extends NameManglers { self: SymbolTable => // Compiler utilized names // val productElementName: NameType = "productElementName" val Ident: NameType = "Ident" - val This: NameType = "This" val StringContext: NameType = "StringContext" + val This: NameType = "This" + val Tree : NameType = "Tree" val TYPE_ : NameType = "TYPE" val TypeTree: NameType = "TypeTree" val UNIT : NameType = "UNIT" @@ -427,7 +428,7 @@ trait StdNames extends NameManglers { self: SymbolTable => val toInteger: NameType = "toInteger" } - object tpnme extends TypeNames /*with LibraryTypeNames*/ with TypeNameMangling { + object tpnme extends AbsTypeNames with TypeNames /*with LibraryTypeNames*/ with TypeNameMangling { type NameType = TypeName protected implicit def createNameType(name: String): TypeName = newTypeNameCached(name) @@ -464,7 +465,7 @@ trait StdNames extends NameManglers { self: SymbolTable => val javanme = nme.javaKeywords - object nme extends TermNames /*with LibraryTermNames*/ with TermNameMangling { + object nme extends AbsTermNames with TermNames /*with LibraryTermNames*/ with TermNameMangling { type NameType = TermName protected implicit def createNameType(name: String): TermName = newTermNameCached(name) @@ -711,7 +712,6 @@ trait StdNames extends NameManglers { self: SymbolTable => val BoxedCharacter : TypeName val BoxedNumber : TypeName val Class : TypeName - val Code : TypeName val Delegate : TypeName val IOOBException : TypeName // IndexOutOfBoundsException val InvTargetException : TypeName // InvocationTargetException @@ -846,7 +846,6 @@ trait StdNames 
extends NameManglers { self: SymbolTable => final val BoxedCharacter: TypeName = "System.IConvertible" final val BoxedNumber: TypeName = "System.IConvertible" final val Class: TypeName = "System.Type" - final val Code: TypeName = tpnme.NO_NAME final val Delegate: TypeName = "System.MulticastDelegate" final val IOOBException: TypeName = "System.IndexOutOfRangeException" final val InvTargetException: TypeName = "System.Reflection.TargetInvocationException" @@ -880,7 +879,6 @@ trait StdNames extends NameManglers { self: SymbolTable => private class J2SENames extends JavaNames { final val BeanProperty: TypeName = "scala.beans.BeanProperty" final val BooleanBeanProperty: TypeName = "scala.beans.BooleanBeanProperty" - final val Code: TypeName = "scala.reflect.Code" final val JavaSerializable: TypeName = "java.io.Serializable" } diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index ca7801ac9d..076a7722ae 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -231,15 +231,6 @@ trait Trees extends api.Trees { self: SymbolTable => def Bind(sym: Symbol, body: Tree): Bind = Bind(sym.name, body) setSymbol sym - - /** Factory method for object creation `new tpt(args_1)...(args_n)` - * A `New(t, as)` is expanded to: `(new t).(as)` - */ - def New(tpt: Tree, argss: List[List[Tree]]): Tree = { - assert(!argss.isEmpty) - val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR) - (superRef /: argss) (Apply) - } /** 0-1 argument list new, based on a symbol. */ def New(sym: Symbol, args: Tree*): Tree = diff --git a/src/compiler/scala/tools/ant/Scalac.scala b/src/compiler/scala/tools/ant/Scalac.scala index 04ff0c440d..3c79fcd3fb 100644 --- a/src/compiler/scala/tools/ant/Scalac.scala +++ b/src/compiler/scala/tools/ant/Scalac.scala @@ -90,7 +90,7 @@ class Scalac extends ScalaMatchingTask with ScalacShared { /** Defines valid values for properties that refer to compiler phases. 
*/ object CompilerPhase extends PermissibleValue { - val values = List("namer", "typer", "pickler", "refchecks", "liftcode", + val values = List("namer", "typer", "pickler", "refchecks", "uncurry", "tailcalls", "specialize", "explicitouter", "erasure", "lazyvals", "lambdalift", "constructors", "flatten", "mixin", "cleanup", "icode", "inliner", diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 797ed7e047..d4152dffdc 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -37,6 +37,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb with Plugins with PhaseAssembly with Trees + with Reifiers with TreePrinters with DocComments with MacroContext @@ -124,7 +125,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb /** Print tree in detailed form */ object nodePrinters extends { val global: Global.this.type = Global.this - } with NodePrinters { + } with NodePrinters with ReifyPrinters { infolevel = InfoLevel.Verbose } @@ -134,6 +135,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb } with TreeBrowsers val nodeToString = nodePrinters.nodeToString + val reifiedNodeToString = nodePrinters.reifiedNodeToString val treeBrowser = treeBrowsers.create() // ------------ Hooks for interactive mode------------------------- @@ -457,17 +459,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb val runsRightAfter = None } with RefChecks - // phaseName = "liftcode" - object liftcode extends { - val global: Global.this.type = Global.this - val runsAfter = List[String]("refchecks") - val runsRightAfter = None - } with LiftCode - // phaseName = "uncurry" override object uncurry extends { val global: Global.this.type = Global.this - val runsAfter = List[String]("refchecks", "liftcode") + val runsAfter = List[String]("refchecks") val runsRightAfter = None } with UnCurry @@ -652,7 +647,6 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb superAccessors -> "add super accessors in traits and nested classes", pickler -> "serialize symbol tables", refChecks -> "reference/override checking, translate nested objects", - liftcode -> "reify trees", uncurry -> "uncurry, translate function values to anonymous classes", tailCalls -> "replace tail calls by jumps", specializeTypes -> "@specialized-driven class and method specialization", diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala new file mode 100644 index 0000000000..952110ade2 --- /dev/null +++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala @@ -0,0 +1,445 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Gilles Dubochet + */ + +package scala.tools.nsc +package ast + +import symtab._ +import Flags._ +import scala.collection.{ mutable, immutable } +import scala.collection.mutable.ListBuffer +import scala.tools.nsc.util.FreshNameCreator +import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple } + +/** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type. + * See more info in the comments to `reify' in scala.reflect.macro.Context. 
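Per the commit message, the services LiftCode used to provide now sit behind scala.reflect.macro.Context#reify, which returns a Tree rather than a Code. As a rough, hypothetical illustration of that entry point (the macro plumbing and the exact signature here are assumptions, not code from this patch), a Code.lift-style implementation reduces to a single call:

    // Hypothetical sketch only -- assumes a macro context `c` exposing
    // `reify(tree: Tree): Tree` as described in the commit message above.
    def liftImpl(c: scala.reflect.macro.Context)(expr: c.Tree): c.Tree =
      c.reify(expr)   // yields a Tree that, when evaluated at runtime, rebuilds `expr`
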
+ * + * @author Martin Odersky + * @version 2.10 + */ +trait Reifiers { self: Global => + + def reify(tree: Tree): Tree = { + if (tree.tpe != null) { + val saved = printTypings + try { + val reifyDebug = settings.Yreifydebug.value + val debugTrace = util.trace when reifyDebug + debugTrace("transforming = ")(if (settings.Xshowtrees.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) + debugTrace("transformed = ") { + val reifier = new Reifier() + val untyped = reifier.reifyTopLevel(tree) + + val reifyCopypaste = settings.Yreifycopypaste.value + if (reifyCopypaste) { + if (reifyDebug) println("=======================") + println(reifiedNodeToString(untyped)) + if (reifyDebug) println("=======================") + } + + untyped + } + } finally { + printTypings = saved + } + } else { + CannotReifyPreTyperTrees(tree) + } + } + + class Reifier() { + import definitions._ + + final val scalaPrefix = "scala." + final val localPrefix = "$local" + final val memoizerName = "$memo" + + val reifyDebug = settings.Yreifydebug.value + + private val reifiableSyms = mutable.ArrayBuffer[Symbol]() // the symbols that are reified with the tree + private val symIndex = mutable.HashMap[Symbol, Int]() // the index of a reifiable symbol in `reifiableSyms` + private var boundSyms = Set[Symbol]() // set of all symbols that are bound in tree to be reified + + /** + * Generate tree of the form + * + * { val $mr = scala.reflect.runtime.Mirror + * $local1 = new TypeSymbol(owner1, NoPosition, name1) + * ... + * $localN = new TermSymbol(ownerN, NoPositiion, nameN) + * $local1.setInfo(tpe1) + * ... + * $localN.setInfo(tpeN) + * $localN.setAnnotations(annotsN) + * rtree + * } + * + * where + * + * - `$localI` are free type symbols in the environment, as well as local symbols + * of refinement types. + * - `tpeI` are the info's of `symI` + * - `rtree` is code that generates `data` at runtime, maintaining all attributes. + * - `data` is typically a tree or a type. 
+ */ + def reifyTopLevel(data: Any): Tree = { + val rtree = reify(data) + Block(mirrorAlias :: reifySymbolTableSetup, rtree) + } + + private def isLocatable(sym: Symbol) = + sym.isPackageClass || sym.owner.isClass || sym.isTypeParameter && sym.paramPos >= 0 + + private def registerReifiableSymbol(sym: Symbol): Unit = + if (!(symIndex contains sym)) { + sym.owner.ownersIterator find (x => !isLocatable(x)) foreach registerReifiableSymbol + symIndex(sym) = reifiableSyms.length + reifiableSyms += sym + } + + // helper methods + + private def localName(sym: Symbol): TermName = + newTermName(localPrefix + symIndex(sym)) + + private def call(fname: String, args: Tree*): Tree = + Apply(termPath(fname), args.toList) + + private def mirrorSelect(name: String): Tree = + termPath(nme.MIRROR_PREFIX + name) + + private def mirrorCall(name: TermName, args: Tree*): Tree = + call("" + (nme.MIRROR_PREFIX append name), args: _*) + + private def mirrorCall(name: String, args: Tree*): Tree = + call(nme.MIRROR_PREFIX + name, args: _*) + + private def mirrorFactoryCall(value: Product, args: Tree*): Tree = + mirrorCall(value.productPrefix, args: _*) + + private def scalaFactoryCall(name: String, args: Tree*): Tree = + call(scalaPrefix + name + ".apply", args: _*) + + private def mkList(args: List[Tree]): Tree = + scalaFactoryCall("collection.immutable.List", args: _*) + + private def reifyModifiers(m: Modifiers) = + mirrorCall("modifiersFromInternalFlags", reify(m.flags), reify(m.privateWithin), reify(m.annotations)) + + private def reifyAggregate(name: String, args: Any*) = + scalaFactoryCall(name, (args map reify).toList: _*) + + /** + * Reify a list + */ + private def reifyList(xs: List[Any]): Tree = + mkList(xs map reify) + + /** Reify a name */ + private def reifyName(name: Name) = + mirrorCall(if (name.isTypeName) "newTypeName" else "newTermName", Literal(Constant(name.toString))) + + private def isFree(sym: Symbol) = + !(symIndex contains sym) + + /** + * Reify a reference to a symbol + */ + private def reifySymRef(sym: Symbol): Tree = { + symIndex get sym match { + case Some(idx) => + Ident(localName(sym)) + case None => + if (sym == NoSymbol) + mirrorSelect("NoSymbol") + else if (sym == RootPackage) + mirrorSelect("definitions.RootPackage") + else if (sym == RootClass) + mirrorSelect("definitions.RootClass") + else if (sym == EmptyPackage) + mirrorSelect("definitions.EmptyPackage") + else if (sym == EmptyPackageClass) + mirrorSelect("definitions.EmptyPackageClass") + else if (sym.isModuleClass) + Select(reifySymRef(sym.sourceModule), "moduleClass") + else if (sym.isStatic && sym.isClass) + mirrorCall("staticClass", reify(sym.fullName)) + else if (sym.isStatic && sym.isModule) + mirrorCall("staticModule", reify(sym.fullName)) + else if (isLocatable(sym)) + if (sym.isTypeParameter) + mirrorCall("selectParam", reify(sym.owner), reify(sym.paramPos)) + else { + if (reifyDebug) println("locatable: " + sym + " " + sym.isPackageClass + " " + sym.owner + " " + sym.isTypeParameter) + val rowner = reify(sym.owner) + val rname = reify(sym.name.toString) + if (sym.isType) + mirrorCall("selectType", rowner, rname) + else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) { + val index = sym.owner.info.decl(sym.name).alternatives indexOf sym + assert(index >= 0, sym) + mirrorCall("selectOverloadedMethod", rowner, rname, reify(index)) + } else + mirrorCall("selectTerm", rowner, rname) + } + else { + if (sym.isTerm) { + if (reifyDebug) println("Free: " + sym) + val symtpe = 
lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false) + def markIfCaptured(arg: Ident): Tree = + if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg + mirrorCall("freeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym))) + } else { + if (reifyDebug) println("Late local: " + sym) + registerReifiableSymbol(sym) + reifySymRef(sym) + } + } + } + } + + /** + * reify the creation of a symbol + */ + private def reifySymbolDef(sym: Symbol): Tree = { + if (reifyDebug) println("reify sym def " + sym) + + ValDef(NoMods, localName(sym), TypeTree(), + Apply( + Select(reify(sym.owner), "newNestedSymbol"), + List(reify(sym.name), reify(sym.pos), Literal(Constant(sym.flags))) + ) + ) + } + + /** + * Generate code to add type and annotation info to a reified symbol + */ + private def fillInSymbol(sym: Symbol): Tree = { + val rset = Apply(Select(reifySymRef(sym), nme.setTypeSig), List(reifyType(sym.info))) + if (sym.annotations.isEmpty) rset + else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations))) + } + + /** Reify a scope */ + private def reifyScope(scope: Scope): Tree = { + scope foreach registerReifiableSymbol + mirrorCall(nme.newScopeWith, scope.toList map reifySymRef: _*) + } + + /** Reify a list of symbols that need to be created */ + private def reifySymbols(syms: List[Symbol]): Tree = { + syms foreach registerReifiableSymbol + mkList(syms map reifySymRef) + } + + /** Reify a type that defines some symbols */ + private def reifyTypeBinder(value: Product, bound: List[Symbol], underlying: Type): Tree = + mirrorFactoryCall(value, reifySymbols(bound), reify(underlying)) + + /** Reify a type */ + private def reifyType(tpe0: Type): Tree = { + val tpe = tpe0.normalize + val tsym = tpe.typeSymbol + if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic) + Select(reifySymRef(tpe.typeSymbol), nme.asTypeConstructor) + else tpe match { + case t @ NoType => + reifyMirrorObject(t) + case t @ NoPrefix => + reifyMirrorObject(t) + case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic => + mirrorCall(nme.thisModuleType, reify(clazz.fullName)) + case t @ RefinedType(parents, decls) => + registerReifiableSymbol(tpe.typeSymbol) + mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol)) + case t @ ClassInfoType(parents, decls, clazz) => + registerReifiableSymbol(clazz) + mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol)) + case t @ ExistentialType(tparams, underlying) => + reifyTypeBinder(t, tparams, underlying) + case t @ PolyType(tparams, underlying) => + reifyTypeBinder(t, tparams, underlying) + case t @ MethodType(params, restpe) => + reifyTypeBinder(t, params, restpe) + case _ => + reifyProductUnsafe(tpe) + } + } + + private def definedInLiftedCode(tpe: Type) = + tpe exists (tp => boundSyms contains tp.typeSymbol) + + private def isErased(tree: Tree) = tree match { + case tt: TypeTree => definedInLiftedCode(tt.tpe) && tt.original == null + case _ => false + } + + /** Reify a tree */ + private def reifyTree(tree: Tree): Tree = tree match { + case EmptyTree => + reifyMirrorObject(EmptyTree) + case This(_) if !(boundSyms contains tree.symbol) => + reifyFree(tree) + case Ident(_) if !(boundSyms contains tree.symbol) => + if (tree.symbol.isVariable && tree.symbol.owner.isTerm) { + captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here. 
+ mirrorCall("Select", reifyFree(tree), reifyName(nme.elem)) + } else reifyFree(tree) + case tt: TypeTree if (tt.tpe != null) => + if (definedInLiftedCode(tt.tpe)) { + // erase non-essential (i.e. inferred) types + // reify symless counterparts of essential types + if (tt.original != null) reify(tt.original) else mirrorCall("TypeTree") + } else { + var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe)) + if (tt.original != null) { + val setOriginal = Select(rtt, newTermName("setOriginal")) + val reifiedOriginal = reify(tt.original) + rtt = Apply(setOriginal, List(reifiedOriginal)) + } + rtt + } + case ta @ TypeApply(hk, ts) => + if (ts exists isErased) reifyTree(hk) else reifyProduct(ta) + case self.emptyValDef => + mirrorSelect(nme.emptyValDef) + case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) => + CannotReifyClassOfBoundType(tree, tpe) + case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym => + CannotReifyClassOfBoundEnum(tree, constant.tpe) + case _ => + if (tree.isDef) { + if (reifyDebug) println("boundSym: " + tree.symbol) + boundSyms += tree.symbol + } + + reifyProduct(tree) + /* + if (tree.isDef || tree.isInstanceOf[Function]) + registerReifiableSymbol(tree.symbol) + if (tree.hasSymbol) + rtree = Apply(Select(rtree, nme.setSymbol), List(reifySymRef(tree.symbol))) + Apply(Select(rtree, nme.setType), List(reifyType(tree.tpe))) +*/ + } + + /** + * Reify a free reference. The result will be either a mirror reference + * to a global value, or else a mirror Literal. + */ + private def reifyFree(tree: Tree): Tree = tree match { + case This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass => + val sym = tree.symbol + if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) + if (reifyDebug) println("Free: " + sym) + val freeVar = mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), This(sym)) + mirrorCall(nme.Ident, freeVar) + case This(_) => + if (reifyDebug) println("This for %s, reified as This".format(tree.symbol)) + mirrorCall(nme.This, reifySymRef(tree.symbol)) + case _ => + mirrorCall(nme.Ident, reifySymRef(tree.symbol)) + } + + // todo: consider whether we should also reify positions + private def reifyPosition(pos: Position): Tree = + reifyMirrorObject(NoPosition) + + // !!! we must eliminate these casts. + private def reifyProductUnsafe(x: Any): Tree = + reifyProduct(x.asInstanceOf[Product]) + private def reifyProduct(x: Product): Tree = + mirrorCall(x.productPrefix, (x.productIterator map reify).toList: _*) + + /** + * Reify a case object defined in Mirror + */ + private def reifyMirrorObject(name: String): Tree = mirrorSelect(name) + private def reifyMirrorObject(x: Product): Tree = reifyMirrorObject(x.productPrefix) + + private def isReifiableConstant(value: Any) = value match { + case null => true // seems pretty reifable to me? 
+ case _: String => true + case _ => isAnyVal(value) + } + + /** Reify an arbitary value */ + private def reify(value: Any): Tree = value match { + case tree: Tree => reifyTree(tree) + case sym: Symbol => reifySymRef(sym) + case tpe: Type => reifyType(tpe) + case xs: List[_] => reifyList(xs) + case xs: Array[_] => scalaFactoryCall(nme.Array, xs map reify: _*) + case scope: Scope => reifyScope(scope) + case x: Name => reifyName(x) + case x: Position => reifyPosition(x) + case x: Modifiers => reifyModifiers(x) + case _ => + if (isReifiableConstant(value)) Literal(Constant(value)) + else reifyProductUnsafe(value) + } + + /** + * An (unreified) path that refers to definition with given fully qualified name + * @param mkName Creator for last portion of name (either TermName or TypeName) + */ + private def path(fullname: String, mkName: String => Name): Tree = { + val parts = fullname split "\\." + val prefixParts = parts.init + val lastName = mkName(parts.last) + if (prefixParts.isEmpty) Ident(lastName) + else { + val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _)) + Select(prefixTree, lastName) + } + } + + /** An (unreified) path that refers to term definition with given fully qualified name */ + private def termPath(fullname: String): Tree = path(fullname, newTermName) + + /** An (unreified) path that refers to type definition with given fully qualified name */ + private def typePath(fullname: String): Tree = path(fullname, newTypeName) + + private def mirrorAlias = + ValDef(NoMods, nme.MIRROR_SHORT, SingletonTypeTree(termPath(fullnme.MirrorPackage)), termPath(fullnme.MirrorPackage)) + + /** + * Generate code that generates a symbol table of all symbols registered in `reifiableSyms` + */ + private def reifySymbolTableSetup: List[Tree] = { + val symDefs, fillIns = new mutable.ArrayBuffer[Tree] + var i = 0 + while (i < reifiableSyms.length) { + // fillInSymbol might create new reifiableSyms, that's why this is done iteratively + symDefs += reifySymbolDef(reifiableSyms(i)) + fillIns += fillInSymbol(reifiableSyms(i)) + i += 1 + } + + symDefs.toList ++ fillIns.toList + } + } + + /** A throwable signalling a reification error */ + class ReifierError(var pos: Position, val msg: String) extends Throwable(msg) { + def this(msg: String) = this(NoPosition, msg) + } + + def CannotReifyPreTyperTrees(tree: Tree) = { + val msg = "pre-typer trees are not supported, consider typechecking the tree before passing it to the reifier" + throw new ReifierError(tree.pos, msg) + } + + def CannotReifyClassOfBoundType(tree: Tree, tpe: Type) = { + val msg = "cannot reify classOf[%s] which refers to a type declared inside the block being reified".format(tpe) + throw new ReifierError(tree.pos, msg) + } + + def CannotReifyClassOfBoundEnum(tree: Tree, tpe: Type) = { + val msg = "cannot reify classOf[%s] which refers to an enum declared inside the block being reified".format(tpe) + throw new ReifierError(tree.pos, msg) + } +} diff --git a/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala new file mode 100644 index 0000000000..aebde18703 --- /dev/null +++ b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala @@ -0,0 +1,85 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.tools.nsc +package ast + +import compat.Platform.EOL +import symtab._ +import Flags._ + +trait ReifyPrinters { self: NodePrinters => + + val global: Global + import global._ + + object 
reifiedNodeToString extends Function1[Tree, String] { + def apply(tree: Tree): String = { + import scala.reflect.api.Modifier + import scala.reflect.api.Modifier._ + + def copypasteModifier(mod: Modifier.Value): String = mod match { + case mod @ ( + `protected` | `private` | `override` | + `abstract` | `final` | `sealed` | + `implicit` | `lazy` | `macro` | + `case` | `trait`) => "`" + mod.toString + "`" + case mod => mod.toString + } + + // @PP: I fervently hope this is a test case or something, not anything being + // depended upon. Of more fragile code I cannot conceive. + // @eb: This stuff is only needed to debug-print out reifications in human-readable format + // Rolling a full-fledged, robust TreePrinter would be several times more code. + (for (line <- (tree.toString.split(EOL) drop 2 dropRight 1)) yield { + var s = line.trim + s = s.replace("$mr.", "") + s = s.replace(".apply", "") + s = s.replace("scala.collection.immutable.", "") + s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List") + s = "List\\[.*?\\]".r.replaceAllIn(s, "List") + s = s.replace("immutable.this.Nil", "List()") + s = s.replace("modifiersFromInternalFlags", "Modifiers") + s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()") + s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => { + val buf = new StringBuilder + + val flags = m.group(1).toLong + val s_flags = Flags.modifiersOfFlags(flags) map copypasteModifier mkString ", " + if (s_flags != "") + buf.append("Set(" + s_flags + ")") + + val privateWithin = "" + m.group(2) + if (privateWithin != "") + buf.append(", newTypeName(\"" + privateWithin + "\")") + + val annotations = m.group(3) + if (annotations.nonEmpty) + buf.append(", List(" + annotations + ")") + + "Modifiers(" + buf.toString + ")" + }) + s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => { + val flags = m.group(1).toLong + val mods = Flags.modifiersOfFlags(flags) map copypasteModifier + "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))" + }) + + s + }) mkString EOL + } + } + + + def printReifyCopypaste(tree: Tree) { + val reifyDebug = settings.Yreifydebug.value + if (reifyDebug) println("=======================") + printReifyCopypaste1(tree) + if (reifyDebug) println("=======================") + } + + def printReifyCopypaste1(tree: Tree) { + } +} \ No newline at end of file diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala index d1ce460eb9..485a1f3a5c 100644 --- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala @@ -167,8 +167,6 @@ trait ScalaSettings extends AbsScalaSettings val Yreifycopypaste = BooleanSetting ("-Yreify-copypaste", "Dump the reified trees in copypasteable representation.") val Yreifydebug = BooleanSetting ("-Yreify-debug", "Trace reification.") - val Yreifytyperdebug - = BooleanSetting ("-Yreifytyper-debug", "Trace typings of reified trees.") val Yreplsync = BooleanSetting ("-Yrepl-sync", "Do not use asynchronous code for repl startup") val Yrepldebug = BooleanSetting ("-Yrepl-debug", "Trace all repl activity.") . 
withPostSetHook(_ => interpreter.replProps.debug setValue true) diff --git a/src/compiler/scala/tools/nsc/transform/LiftCode.scala b/src/compiler/scala/tools/nsc/transform/LiftCode.scala deleted file mode 100644 index d0ed92f8ba..0000000000 --- a/src/compiler/scala/tools/nsc/transform/LiftCode.scala +++ /dev/null @@ -1,570 +0,0 @@ -/* NSC -- new Scala compiler - * Copyright 2005-2011 LAMP/EPFL - * @author Gilles Dubochet - */ - -package scala.tools.nsc -package transform - -import symtab._ -import Flags._ -import scala.collection.{ mutable, immutable } -import scala.collection.mutable.ListBuffer -import scala.tools.nsc.util.FreshNameCreator -import scala.runtime.ScalaRunTime.{ isAnyVal, isTuple } - -/** - * Translate expressions of the form reflect.Code.lift(exp) - * to the reified "reflect trees" representation of exp. - * Also: mutable variables that are accessed from a local function are wrapped in refs. - * - * @author Martin Odersky - * @version 2.10 - */ -abstract class LiftCode extends Transform with TypingTransformers { - - import global._ // the global environment - import definitions._ // standard classes and methods - import typer.{ typed, atOwner } // methods to type trees - - val symbols: global.type = global - - /** the following two members override abstract members in Transform */ - val phaseName: String = "liftcode" - - def newTransformer(unit: CompilationUnit): Transformer = - new Codifier(unit) - - private lazy val MirrorMemberNames = - ReflectRuntimeMirror.info.nonPrivateMembers filter (_.isTerm) map (_.toString) toSet - - // Would be nice if we could use something like this to check the names, - // but it seems that info is unavailable when I need it. - private def mirrorFactoryName(value: Any): Option[String] = value match { - // Modest (inadequate) sanity check that there's a member by this name. - case x: Product if MirrorMemberNames(x.productPrefix) => - Some(x.productPrefix) - case _ => - Some(value.getClass.getName split """[$.]""" last) filter MirrorMemberNames - } - private def isMirrorMemberObject(value: Product) = value match { - case NoType | NoPrefix | NoPosition | EmptyTree => true - case _ => false - } - - class Codifier(unit: CompilationUnit) extends TypingTransformer(unit) { - - val reifyDebug = settings.Yreifydebug.value - val reifyTyperDebug = settings.Yreifytyperdebug.value - val debugTrace = util.trace when reifyDebug - - val reifyCopypaste = settings.Yreifycopypaste.value - def printCopypaste(tree: Tree) { - if (reifyDebug) println("=======================") - printCopypaste1(tree) - if (reifyDebug) println("=======================") - } - def printCopypaste1(tree: Tree) { - import scala.reflect.api.Modifier - import scala.reflect.api.Modifier._ - - def copypasteModifier(mod: Modifier.Value): String = mod match { - case mod @ ( - `protected` | `private` | `override` | - `abstract` | `final` | `sealed` | - `implicit` | `lazy` | `macro` | - `case` | `trait`) => "`" + mod.toString + "`" - case mod => mod.toString - } - - // I fervently hope this is a test case or something, not anything being - // depended upon. Of more fragile code I cannot conceive. 
- for (line <- (tree.toString.split(Properties.lineSeparator) drop 2 dropRight 1)) { - var s = line.trim - s = s.replace("$mr.", "") - s = s.replace(".apply", "") - s = s.replace("scala.collection.immutable.", "") - s = "List\\[List\\[.*?\\].*?\\]".r.replaceAllIn(s, "List") - s = "List\\[.*?\\]".r.replaceAllIn(s, "List") - s = s.replace("immutable.this.Nil", "List()") - s = s.replace("modifiersFromInternalFlags", "Modifiers") - s = s.replace("Modifiers(0L, newTypeName(\"\"), List())", "Modifiers()") - s = """Modifiers\((\d+)[lL], newTypeName\("(.*?)"\), List\((.*?)\)\)""".r.replaceAllIn(s, m => { - val buf = new StringBuilder - - val flags = m.group(1).toLong - val s_flags = Flags.modifiersOfFlags(flags) map copypasteModifier mkString ", " - if (s_flags != "") - buf.append("Set(" + s_flags + ")") - - val privateWithin = "" + m.group(2) - if (privateWithin != "") - buf.append(", newTypeName(\"" + privateWithin + "\")") - - val annotations = m.group(3) - if (annotations.nonEmpty) - buf.append(", List(" + annotations + ")") - - "Modifiers(" + buf.toString + ")" - }) - s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => { - val flags = m.group(1).toLong - val mods = Flags.modifiersOfFlags(flags) map copypasteModifier - "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))" - }) - - println(s) - } - } - - override def transformUnit(unit: CompilationUnit) { - atPhase(phase.next) { - super.transformUnit(unit) - } - } - - override def transform(tree: Tree): Tree = { - val sym = tree.symbol - tree match { - case Apply(_, List(tree)) if sym == Code_lift => // reify Code.lift[T](expr) instances - val saved = printTypings - try { - debugTrace("transforming = ")(if (settings.Xshowtrees.value) "\n" + nodePrinters.nodeToString(tree).trim else tree.toString) - debugTrace("transformed = ") { - val untyped = codify(super.transform(tree)) - if (reifyCopypaste) printCopypaste(untyped) - - printTypings = reifyTyperDebug - val typed = localTyper.typedPos(tree.pos)(untyped) - typed - } - } catch { - case ex: ReifierError => - unit.error(ex.pos, ex.msg) - tree - } finally { - printTypings = saved - } - case _ => - super.transform(tree) - } - } - - def codify(tree: Tree): Tree = debugTrace("codified " + tree + " -> ") { - val targetType = definitions.CodeClass.primaryConstructor.info.paramTypes.head - val reifier = new Reifier() - val arg = gen.mkAsInstanceOf(reifier.reifyTopLevel(tree), targetType, wrapInApply = false) - val treetpe = // this really should use packedType(tree.tpe, context.owner) - // where packedType is defined in Typers. But we can do that only if liftCode is moved to Typers. - if (tree.tpe.typeSymbol.isAnonymousClass) tree.tpe.typeSymbol.classBound - else tree.tpe - New(TypeTree(appliedType(definitions.CodeClass.typeConstructor, List(treetpe.widen))), - List(List(arg))) - } - } - - /** - * Given a tree or type, generate a tree that when executed at runtime produces the original tree or type. - * For instance: Given - * - * var x = 1; Code(x + 1) - * - * The `x + 1` expression is reified to - * - * $mr.Apply($mr.Select($mr.Ident($mr.freeVar("x". , x), "+"), List($mr.Literal($mr.Constant(1)))))) - * - * Or, the term name 'abc' is reified to: - * - * $mr.Apply($mr.Select($mr.Ident("newTermName")), List(Literal(Constant("abc"))))) - * - * todo: Treat embedded Code blocks by merging them into containing block - * - */ - class Reifier() { - - final val scalaPrefix = "scala." 
- final val localPrefix = "$local" - final val memoizerName = "$memo" - - val reifyDebug = settings.Yreifydebug.value - - private val reifiableSyms = mutable.ArrayBuffer[Symbol]() // the symbols that are reified with the tree - private val symIndex = mutable.HashMap[Symbol, Int]() // the index of a reifiable symbol in `reifiableSyms` - private var boundSyms = Set[Symbol]() // set of all symbols that are bound in tree to be reified - - /** - * Generate tree of the form - * - * { val $mr = scala.reflect.runtime.Mirror - * $local1 = new TypeSymbol(owner1, NoPosition, name1) - * ... - * $localN = new TermSymbol(ownerN, NoPositiion, nameN) - * $local1.setInfo(tpe1) - * ... - * $localN.setInfo(tpeN) - * $localN.setAnnotations(annotsN) - * rtree - * } - * - * where - * - * - `$localI` are free type symbols in the environment, as well as local symbols - * of refinement types. - * - `tpeI` are the info's of `symI` - * - `rtree` is code that generates `data` at runtime, maintaining all attributes. - * - `data` is typically a tree or a type. - */ - def reifyTopLevel(data: Any): Tree = { - val rtree = reify(data) - Block(mirrorAlias :: reifySymbolTableSetup, rtree) - } - - private def isLocatable(sym: Symbol) = - sym.isPackageClass || sym.owner.isClass || sym.isTypeParameter && sym.paramPos >= 0 - - private def registerReifiableSymbol(sym: Symbol): Unit = - if (!(symIndex contains sym)) { - sym.owner.ownersIterator find (x => !isLocatable(x)) foreach registerReifiableSymbol - symIndex(sym) = reifiableSyms.length - reifiableSyms += sym - } - - // helper methods - - private def localName(sym: Symbol): TermName = - newTermName(localPrefix + symIndex(sym)) - - private def call(fname: String, args: Tree*): Tree = - Apply(termPath(fname), args.toList) - - private def mirrorSelect(name: String): Tree = - termPath(nme.MIRROR_PREFIX + name) - - private def mirrorCall(name: TermName, args: Tree*): Tree = - call("" + (nme.MIRROR_PREFIX append name), args: _*) - - private def mirrorCall(name: String, args: Tree*): Tree = - call(nme.MIRROR_PREFIX + name, args: _*) - - private def mirrorFactoryCall(value: Product, args: Tree*): Tree = - mirrorCall(value.productPrefix, args: _*) - - private def scalaFactoryCall(name: String, args: Tree*): Tree = - call(scalaPrefix + name + ".apply", args: _*) - - private def mkList(args: List[Tree]): Tree = - scalaFactoryCall("collection.immutable.List", args: _*) - - private def reifyModifiers(m: Modifiers) = - mirrorCall("modifiersFromInternalFlags", reify(m.flags), reify(m.privateWithin), reify(m.annotations)) - - private def reifyAggregate(name: String, args: Any*) = - scalaFactoryCall(name, (args map reify).toList: _*) - - /** - * Reify a list - */ - private def reifyList(xs: List[Any]): Tree = - mkList(xs map reify) - - /** Reify a name */ - private def reifyName(name: Name) = - mirrorCall(if (name.isTypeName) "newTypeName" else "newTermName", Literal(Constant(name.toString))) - - private def isFree(sym: Symbol) = - !(symIndex contains sym) - - /** - * Reify a reference to a symbol - */ - private def reifySymRef(sym: Symbol): Tree = { - symIndex get sym match { - case Some(idx) => - Ident(localName(sym)) - case None => - if (sym == NoSymbol) - mirrorSelect("NoSymbol") - else if (sym == RootPackage) - mirrorSelect("definitions.RootPackage") - else if (sym == RootClass) - mirrorSelect("definitions.RootClass") - else if (sym == EmptyPackage) - mirrorSelect("definitions.EmptyPackage") - else if (sym == EmptyPackageClass) - mirrorSelect("definitions.EmptyPackageClass") - else if 
(sym.isModuleClass) - Select(reifySymRef(sym.sourceModule), "moduleClass") - else if (sym.isStatic && sym.isClass) - mirrorCall("staticClass", reify(sym.fullName)) - else if (sym.isStatic && sym.isModule) - mirrorCall("staticModule", reify(sym.fullName)) - else if (isLocatable(sym)) - if (sym.isTypeParameter) - mirrorCall("selectParam", reify(sym.owner), reify(sym.paramPos)) - else { - if (reifyDebug) println("locatable: " + sym + " " + sym.isPackageClass + " " + sym.owner + " " + sym.isTypeParameter) - val rowner = reify(sym.owner) - val rname = reify(sym.name.toString) - if (sym.isType) - mirrorCall("selectType", rowner, rname) - else if (sym.isMethod && sym.owner.isClass && sym.owner.info.decl(sym.name).isOverloaded) { - val index = sym.owner.info.decl(sym.name).alternatives indexOf sym - assert(index >= 0, sym) - mirrorCall("selectOverloadedMethod", rowner, rname, reify(index)) - } else - mirrorCall("selectTerm", rowner, rname) - } - else { - if (sym.isTerm) { - if (reifyDebug) println("Free: " + sym) - val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false) - def markIfCaptured(arg: Ident): Tree = - if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg - mirrorCall("freeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym))) - } else { - if (reifyDebug) println("Late local: " + sym) - registerReifiableSymbol(sym) - reifySymRef(sym) - } - } - } - } - - /** - * reify the creation of a symbol - */ - private def reifySymbolDef(sym: Symbol): Tree = { - if (reifyDebug) println("reify sym def " + sym) - - ValDef(NoMods, localName(sym), TypeTree(), - Apply( - Select(reify(sym.owner), "newNestedSymbol"), - List(reify(sym.name), reify(sym.pos), Literal(Constant(sym.flags))) - ) - ) - } - - /** - * Generate code to add type and annotation info to a reified symbol - */ - private def fillInSymbol(sym: Symbol): Tree = { - val rset = Apply(Select(reifySymRef(sym), nme.setTypeSig), List(reifyType(sym.info))) - if (sym.annotations.isEmpty) rset - else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations))) - } - - /** Reify a scope */ - private def reifyScope(scope: Scope): Tree = { - scope foreach registerReifiableSymbol - mirrorCall(nme.newScopeWith, scope.toList map reifySymRef: _*) - } - - /** Reify a list of symbols that need to be created */ - private def reifySymbols(syms: List[Symbol]): Tree = { - syms foreach registerReifiableSymbol - mkList(syms map reifySymRef) - } - - /** Reify a type that defines some symbols */ - private def reifyTypeBinder(value: Product, bound: List[Symbol], underlying: Type): Tree = - mirrorFactoryCall(value, reifySymbols(bound), reify(underlying)) - - /** Reify a type */ - private def reifyType(tpe0: Type): Tree = { - val tpe = tpe0.normalize - val tsym = tpe.typeSymbol - if (tsym.isClass && tpe == tsym.typeConstructor && tsym.isStatic) - Select(reifySymRef(tpe.typeSymbol), nme.asTypeConstructor) - else tpe match { - case t @ NoType => - reifyMirrorObject(t) - case t @ NoPrefix => - reifyMirrorObject(t) - case tpe @ ThisType(clazz) if clazz.isModuleClass && clazz.isStatic => - mirrorCall(nme.thisModuleType, reify(clazz.fullName)) - case t @ RefinedType(parents, decls) => - registerReifiableSymbol(tpe.typeSymbol) - mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol)) - case t @ ClassInfoType(parents, decls, clazz) => - registerReifiableSymbol(clazz) - mirrorFactoryCall(t, reify(parents), reify(decls), reify(t.typeSymbol)) - case t @ ExistentialType(tparams, underlying) => - 
reifyTypeBinder(t, tparams, underlying) - case t @ PolyType(tparams, underlying) => - reifyTypeBinder(t, tparams, underlying) - case t @ MethodType(params, restpe) => - reifyTypeBinder(t, params, restpe) - case _ => - reifyProductUnsafe(tpe) - } - } - - private def definedInLiftedCode(tpe: Type) = - tpe exists (tp => boundSyms contains tp.typeSymbol) - - private def isErased(tree: Tree) = tree match { - case tt: TypeTree => definedInLiftedCode(tt.tpe) && tt.original == null - case _ => false - } - - /** Reify a tree */ - private def reifyTree(tree: Tree): Tree = tree match { - case EmptyTree => - reifyMirrorObject(EmptyTree) - case This(_) if !(boundSyms contains tree.symbol) => - reifyFree(tree) - case Ident(_) if !(boundSyms contains tree.symbol) => - if (tree.symbol.isVariable && tree.symbol.owner.isTerm) { - captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here. - mirrorCall("Select", reifyFree(tree), reifyName(nme.elem)) - } else reifyFree(tree) - case tt: TypeTree if (tt.tpe != null) => - if (definedInLiftedCode(tt.tpe)) { - // erase non-essential (i.e. inferred) types - // reify symless counterparts of essential types - if (tt.original != null) reify(tt.original) else mirrorCall("TypeTree") - } else { - var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe)) - if (tt.original != null) { - val setOriginal = Select(rtt, newTermName("setOriginal")) - val reifiedOriginal = reify(tt.original) - rtt = Apply(setOriginal, List(reifiedOriginal)) - } - rtt - } - case ta @ TypeApply(hk, ts) => - if (ts exists isErased) reifyTree(hk) else reifyProduct(ta) - case global.emptyValDef => - mirrorSelect(nme.emptyValDef) - case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) => - CannotReifyClassOfBoundType(tree, tpe) - case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym => - CannotReifyClassOfBoundEnum(tree, constant.tpe) - case _ => - if (tree.isDef) { - if (reifyDebug) println("boundSym: " + tree.symbol) - boundSyms += tree.symbol - } - - reifyProduct(tree) - /* - if (tree.isDef || tree.isInstanceOf[Function]) - registerReifiableSymbol(tree.symbol) - if (tree.hasSymbol) - rtree = Apply(Select(rtree, nme.setSymbol), List(reifySymRef(tree.symbol))) - Apply(Select(rtree, nme.setType), List(reifyType(tree.tpe))) -*/ - } - - /** - * Reify a free reference. The result will be either a mirror reference - * to a global value, or else a mirror Literal. - */ - private def reifyFree(tree: Tree): Tree = tree match { - case This(_) if tree.symbol.isClass && !tree.symbol.isModuleClass => - val sym = tree.symbol - if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) - if (reifyDebug) println("Free: " + sym) - val freeVar = mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), This(sym)) - mirrorCall(nme.Ident, freeVar) - case This(_) => - if (reifyDebug) println("This for %s, reified as This".format(tree.symbol)) - mirrorCall(nme.This, reifySymRef(tree.symbol)) - case _ => - mirrorCall(nme.Ident, reifySymRef(tree.symbol)) - } - - // todo: consider whether we should also reify positions - private def reifyPosition(pos: Position): Tree = - reifyMirrorObject(NoPosition) - - // !!! we must eliminate these casts. 
- private def reifyProductUnsafe(x: Any): Tree = - reifyProduct(x.asInstanceOf[Product]) - private def reifyProduct(x: Product): Tree = - mirrorCall(x.productPrefix, (x.productIterator map reify).toList: _*) - - /** - * Reify a case object defined in Mirror - */ - private def reifyMirrorObject(name: String): Tree = mirrorSelect(name) - private def reifyMirrorObject(x: Product): Tree = reifyMirrorObject(x.productPrefix) - - private def isReifiableConstant(value: Any) = value match { - case null => true // seems pretty reifable to me? - case _: String => true - case _ => isAnyVal(value) - } - - /** Reify an arbitary value */ - private def reify(value: Any): Tree = value match { - case tree: Tree => reifyTree(tree) - case sym: Symbol => reifySymRef(sym) - case tpe: Type => reifyType(tpe) - case xs: List[_] => reifyList(xs) - case xs: Array[_] => scalaFactoryCall(nme.Array, xs map reify: _*) - case scope: Scope => reifyScope(scope) - case x: Name => reifyName(x) - case x: Position => reifyPosition(x) - case x: Modifiers => reifyModifiers(x) - case _ => - if (isReifiableConstant(value)) Literal(Constant(value)) - else reifyProductUnsafe(value) - } - - /** - * An (unreified) path that refers to definition with given fully qualified name - * @param mkName Creator for last portion of name (either TermName or TypeName) - */ - private def path(fullname: String, mkName: String => Name): Tree = { - val parts = fullname split "\\." - val prefixParts = parts.init - val lastName = mkName(parts.last) - if (prefixParts.isEmpty) Ident(lastName) - else { - val prefixTree = ((Ident(prefixParts.head): Tree) /: prefixParts.tail)(Select(_, _)) - Select(prefixTree, lastName) - } - } - - /** An (unreified) path that refers to term definition with given fully qualified name */ - private def termPath(fullname: String): Tree = path(fullname, newTermName) - - /** An (unreified) path that refers to type definition with given fully qualified name */ - private def typePath(fullname: String): Tree = path(fullname, newTypeName) - - private def mirrorAlias = - ValDef(NoMods, nme.MIRROR_SHORT, TypeTree(), termPath(fullnme.MirrorPackage)) - - /** - * Generate code that generates a symbol table of all symbols registered in `reifiableSyms` - */ - private def reifySymbolTableSetup: List[Tree] = { - val symDefs, fillIns = new mutable.ArrayBuffer[Tree] - var i = 0 - while (i < reifiableSyms.length) { - // fillInSymbol might create new reifiableSyms, that's why this is done iteratively - symDefs += reifySymbolDef(reifiableSyms(i)) - fillIns += fillInSymbol(reifiableSyms(i)) - i += 1 - } - - symDefs.toList ++ fillIns.toList - } - } - - /** A throwable signalling a reification error */ - class ReifierError(var pos: Position, val msg: String) extends Throwable(msg) { - def this(msg: String) = this(NoPosition, msg) - } - - def CannotReifyClassOfBoundType(tree: Tree, tpe: Type) = { - val msg = "cannot reify classOf[%s] which refers to a type declared inside the block being reified".format(tpe) - throw new ReifierError(tree.pos, msg) - } - - def CannotReifyClassOfBoundEnum(tree: Tree, tpe: Type) = { - val msg = "cannot reify classOf[%s] which refers to an enum declared inside the block being reified".format(tpe) - throw new ReifierError(tree.pos, msg) - } -} diff --git a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala index 036e7fc750..3d2f86d54d 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Implicits.scala +++ 
b/src/compiler/scala/tools/nsc/typechecker/Implicits.scala @@ -1163,7 +1163,7 @@ trait Implicits { /* !!! the following is almost right, but we have to splice nested manifest * !!! types into this type. This requires a substantial extension of * !!! reifiers. - val reifier = new liftcode.Reifier() + val reifier = new Reifier() val rtree = reifier.reifyTopLevel(tp1) manifestFactoryCall("apply", tp, rtree) */ diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index eac657da19..e8c03aff66 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -640,13 +640,7 @@ trait Infer { case ExistentialType(tparams, qtpe) => isApplicable(undetparams, qtpe, argtpes0, pt) case MethodType(params, _) => - val formals0 = params map { param => - param.tpe match { - case TypeRef(_, sym, List(tpe)) if sym isNonBottomSubClass CodeClass => tpe - case tpe => tpe - } - } - val formals = formalTypes(formals0, argtpes0.length) + val formals = formalTypes(params map { _.tpe }, argtpes0.length) def tryTupleApply: Boolean = { // if 1 formal, 1 argtpe (a tuple), otherwise unmodified argtpes0 diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index c63ae90ef6..4c790bfc34 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -108,11 +108,14 @@ trait Macros { self: Analyzer => else { val receiverClass: mirror.Symbol = mirror.classWithName(mmeth.owner.fullName) val receiverObj = receiverClass.companionModule - if (receiverObj == NoSymbol) None + if (receiverObj == mirror.NoSymbol) None else { val receiver = mirror.getCompanionObject(receiverClass) val rmeth = receiverObj.info.member(mirror.newTermName(mmeth.name.toString)) - Some((receiver, rmeth)) + if (rmeth == mirror.NoSymbol) None + else { + Some((receiver, rmeth)) + } } } } catch { diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index d039515320..eb0bed035c 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -708,8 +708,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } - def isCodeType(tpe: Type) = tpe.typeSymbol isNonBottomSubClass CodeClass - /** Perform the following adaptations of expression, pattern or type `tree` wrt to * given mode `mode` and given prototype `pt`: * (-1) For expressions with annotated types, let AnnotationCheckers decide what to do @@ -1993,8 +1991,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { */ def typedFunction(fun: Function, mode: Int, pt: Type): Tree = { val numVparams = fun.vparams.length - val codeExpected = !forMSIL && (pt.typeSymbol isNonBottomSubClass CodeClass) - if (numVparams > definitions.MaxFunctionArity) return MaxFunctionArityError(fun) @@ -2011,7 +2007,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { else (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType) - val (clazz, argpts, respt) = decompose(if (codeExpected) pt.normalize.typeArgs.head else pt) + val (clazz, argpts, respt) = decompose(pt) if (argpts.lengthCompare(numVparams) != 0) WrongNumberOfParametersError(fun, argpts) else { @@ -2021,7 +2017,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { if 
(isFullyDefined(argpt)) argpt else { fun match { - case etaExpansion(vparams, fn, args) if !codeExpected => + case etaExpansion(vparams, fn, args) => silent(_.typed(fn, forFunMode(mode), pt)) match { case SilentResultValue(fn1) if context.undetparams.isEmpty => // if context,undetparams is not empty, the function was polymorphic, @@ -2053,13 +2049,8 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val restpe = packedType(body1, fun.symbol).deconst.resultType val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe) // body = checkNoEscaping.locals(context.scope, restpe, body) - val fun1 = treeCopy.Function(fun, vparams, body1).setType(funtpe) - if (codeExpected) lifted(fun1) else fun1 - } + treeCopy.Function(fun, vparams, body1).setType(funtpe) } - - def lifted(tree: Tree): Tree = typedPos(tree.pos) { - Apply(Select(Ident(CodeModule), nme.lift_), List(tree)) } def typedRefinement(stats: List[Tree]) { diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala index 52705d302c..f28264c7a2 100644 --- a/src/library/scala/reflect/Code.scala +++ b/src/library/scala/reflect/Code.scala @@ -11,12 +11,14 @@ package scala.reflect /** This type is required by the compiler and should not be used in client code. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10") class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) { val manifest = implicitly[Manifest[T]] override def toString = "Code(tree = "+tree+", manifest = "+manifest+")" } /** This type is required by the compiler and should not be used in client code. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10") object Code { def lift[A](tree: A): Code[A] = throw new Error("Code was not lifted by compiler") diff --git a/src/library/scala/reflect/api/StandardNames.scala b/src/library/scala/reflect/api/StandardNames.scala new file mode 100644 index 0000000000..81517d2a6b --- /dev/null +++ b/src/library/scala/reflect/api/StandardNames.scala @@ -0,0 +1,21 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Martin Odersky + */ + +package scala.reflect +package api + +trait StandardNames { self: Universe => + + val nme: AbsTermNames + + abstract class AbsTermNames { + val CONSTRUCTOR: TermName + } + + val tpnme: AbsTypeNames + + abstract class AbsTypeNames { + } +} diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index 0a38fb45bf..b8b32477dd 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -476,6 +476,17 @@ trait Trees { self: Universe => */ case class New(tpt: Tree) extends TermTree + /** Factory method for object creation `new tpt(args_1)...(args_n)` + * A `New(t, as)` is expanded to: `(new t).(as)` + */ + def New(tpt: Tree, argss: List[List[Tree]]): Tree = { + assert(!argss.isEmpty) + // todo. 
we need to expose names in scala.reflect.api
+//    val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR)
+    val superRef: Tree = Select(New(tpt), "<init>")
+    (superRef /: argss) (Apply)
+  }
+
   /** Type annotation, eliminated by explicit outer */
   case class Typed(expr: Tree, tpt: Tree) extends TermTree
@@ -632,10 +643,10 @@ trait Trees { self: Universe =>
   }
   def TypeTree(tp: Type): TypeTree = TypeTree() setType tp
-  
+
   /** An empty deferred value definition corresponding to:
    *    val _: _
-   * This is used as a placeholder in the `self` parameter Template if there is 
+   * This is used as a placeholder in the `self` parameter Template if there is
    * no definition of a self value of self type.
    */
   def emptyValDef: ValDef
diff --git a/src/library/scala/reflect/api/Universe.scala b/src/library/scala/reflect/api/Universe.scala
index 03acbdda2c..a3cec3271b 100755
--- a/src/library/scala/reflect/api/Universe.scala
+++ b/src/library/scala/reflect/api/Universe.scala
@@ -10,7 +10,8 @@ abstract class Universe extends Symbols
                            with Positions
                            with TreePrinters
                            with AnnotationInfos
-                           with StandardDefinitions {
+                           with StandardDefinitions
+                           with StandardNames {
   type Position
   val NoPosition: Position
diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala
index d0a2787fdf..3b6f96d7a8 100644
--- a/src/library/scala/reflect/macro/Context.scala
+++ b/src/library/scala/reflect/macro/Context.scala
@@ -12,4 +12,25 @@ trait Context extends api.Universe {
    */
  def referenceCapturedVariable(id: Ident): Tree

+  /** Given a tree or type, generate a tree that when executed at runtime produces the original tree or type.
+   *  For instance, given the abstract syntax tree representation of the `x + 1` expression:
+   *
+   *    Apply(Select(Ident("x"), "+"), List(Literal(Constant(1))))
+   *
+   *  The reifier transforms it to the following tree:
+   *
+   *    $mr.Apply($mr.Select($mr.Ident($mr.freeVar("x", <Int>, x), "+"), List($mr.Literal($mr.Constant(1))))))
+   *
+   *  The transformation looks mostly straightforward, but it has its tricky parts:
+   *    * Reifier retains symbols and types defined outside the reified tree, however
+   *      locally defined entities get erased and replaced with their original trees
+   *    * Free variables are detected and wrapped in symbols of the type FreeVar
+   *    * Mutable variables that are accessed from a local function are wrapped in refs
+   *    * Since reified trees can be compiled outside of the scope they've been created in,
+   *      special measures are taken to ensure that all freeVars remain visible
+   *
+   *  Typical usage of this function is to retain some of the trees received/created by a macro
+   *  into the form that can be inspected (via pattern matching) or compiled/run (by a reflective ToolBox) during the runtime.
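+   *  A minimal usage sketch (hypothetical names; assumes some `tree: Tree` is already available to the macro):
+   *
+   *    val reified: Tree = reify(tree)   // evaluating `reified` at runtime reconstructs `tree`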
+   */
+  def reify(tree: Tree): Tree
 }
diff --git a/src/partest/scala/tools/partest/PartestTask.scala b/src/partest/scala/tools/partest/PartestTask.scala
index a90a61a9aa..524dc06327 100644
--- a/src/partest/scala/tools/partest/PartestTask.scala
+++ b/src/partest/scala/tools/partest/PartestTask.scala
@@ -15,10 +15,8 @@ import scala.tools.nsc.io.{ Directory, Path => SPath }
 import nsc.util.ClassPath
 import util.PathResolver
 import scala.tools.ant.sabbus.CompilationPathProperty
-
 import java.io.File
 import java.lang.reflect.Method
-
 import org.apache.tools.ant.Task
 import org.apache.tools.ant.types.{Path, Reference, FileSet}
 import org.apache.tools.ant.types.Commandline.Argument
@@ -309,6 +307,16 @@ class PartestTask extends Task with CompilationPathProperty {
     val antRunner = new scala.tools.partest.nest.AntRunner
     val antFileManager = antRunner.fileManager

+    // this is a workaround for https://issues.scala-lang.org/browse/SI-5433
+    // when that bug is fixed, this paragraph of code can be safely removed
+    // we hack into the classloader that will become parent classloader for scalac
+    // this way we ensure that reflective macro lookup will pick correct Code.lift
+    val loader = getClass.getClassLoader.asInstanceOf[org.apache.tools.ant.AntClassLoader]
+    val path = new org.apache.tools.ant.types.Path(getProject())
+    val newClassPath = ClassPath.join(nest.PathSettings.srcCodeLib.toString, loader.getClasspath)
+    path.setPath(newClassPath)
+    loader.setClassPath(path)
+
     antFileManager.showDiff = showDiff
     antFileManager.showLog = showLog
     antFileManager.failed = runFailed
diff --git a/src/partest/scala/tools/partest/nest/CompileManager.scala b/src/partest/scala/tools/partest/nest/CompileManager.scala
index 6604bc551d..7aaa7bab00 100644
--- a/src/partest/scala/tools/partest/nest/CompileManager.scala
+++ b/src/partest/scala/tools/partest/nest/CompileManager.scala
@@ -12,6 +12,7 @@ import scala.tools.nsc.{ Global, Settings, CompilerCommand, FatalError, io }
 import scala.tools.nsc.interactive.RangePositions
 import scala.tools.nsc.reporters.{ Reporter, ConsoleReporter }
 import scala.tools.nsc.util.{ ClassPath, FakePos }
+import scala.tools.nsc.Properties.{ setProp, propOrEmpty }
 import scala.tools.util.PathResolver
 import io.Path
 import java.io.{ File, BufferedReader, PrintWriter, FileReader, Writer, FileWriter, StringWriter }
@@ -112,6 +113,7 @@ class DirectCompiler(val fileManager: FileManager) extends SimpleCompiler {
     try {
       NestUI.verbose("compiling "+toCompile)
       NestUI.verbose("with classpath: "+global.classPath.toString)
+      NestUI.verbose("and java classpath: "+ propOrEmpty("java.class.path"))
       try new global.Run compile toCompile
       catch {
         case FatalError(msg) =>
diff --git a/src/partest/scala/tools/partest/nest/PathSettings.scala b/src/partest/scala/tools/partest/nest/PathSettings.scala
index 04f36ffa11..e0a2f65b80 100644
--- a/src/partest/scala/tools/partest/nest/PathSettings.scala
+++ b/src/partest/scala/tools/partest/nest/PathSettings.scala
@@ -40,6 +40,13 @@ object PathSettings {
     sys.error("No instrumented.jar found in %s".format(srcSpecLibDir))
   }

+  // Directory <root>/test/files/codelib
+  lazy val srcCodeLibDir = Directory(srcDir / "codelib")
+
+  lazy val srcCodeLib: File = findJar(srcCodeLibDir, "code") getOrElse {
+    sys.error("No code.jar found in %s".format(srcCodeLibDir))
+  }
+
   // Directory <root>/build
   lazy val buildDir: Directory = {
     val bases = testRoot :: testRoot.parents
diff --git a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala
index 7c6dd0848f..5cde63dc81 100644 --- a/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala +++ b/src/partest/scala/tools/partest/nest/ReflectiveRunner.scala @@ -53,7 +53,13 @@ class ReflectiveRunner { Array(latestCompFile, latestLibFile, latestPartestFile, latestFjbgFile, latestScalapFile) map (x => io.File(x)) val sepUrls = files map (_.toURL) - val sepLoader = new URLClassLoader(sepUrls, null) + var sepLoader = new URLClassLoader(sepUrls, null) + + // this is a workaround for https://issues.scala-lang.org/browse/SI-5433 + // when that bug is fixed, this paragraph of code can be safely removed + // we hack into the classloader that will become parent classloader for scalac + // this way we ensure that reflective macro lookup will pick correct Code.lift + sepLoader = new URLClassLoader((PathSettings.srcCodeLib +: files) map (_.toURL), null) if (isPartestDebug) println("Loading classes from:\n" + sepUrls.mkString("\n")) diff --git a/src/partest/scala/tools/partest/nest/TestFile.scala b/src/partest/scala/tools/partest/nest/TestFile.scala index 3e5fe35f9e..fc5792e886 100644 --- a/src/partest/scala/tools/partest/nest/TestFile.scala +++ b/src/partest/scala/tools/partest/nest/TestFile.scala @@ -35,6 +35,10 @@ abstract class TestFile(val kind: String) extends TestFileCommon { if (setOutDir) settings.outputDirs setSingleOutput setOutDirTo.path + // adding code.jar to the classpath (to provide Code.lift services for reification tests) + settings.classpath prepend PathSettings.srcCodeLib.toString + if (propIsSet("java.class.path")) setProp("java.class.path", PathSettings.srcCodeLib.toString + ";" + propOrElse("java.class.path", "")) + // have to catch bad flags somewhere (flags forall (f => settings.processArgumentString(f)._1)) && { settings.classpath append fileManager.CLASSPATH diff --git a/src/partest/scala/tools/partest/nest/Worker.scala b/src/partest/scala/tools/partest/nest/Worker.scala index 952d99c318..3f2cb16082 100644 --- a/src/partest/scala/tools/partest/nest/Worker.scala +++ b/src/partest/scala/tools/partest/nest/Worker.scala @@ -520,7 +520,9 @@ class Worker(val fileManager: FileManager, params: TestRunParams) extends Actor runTestCommon(file, expectFailure = false)((logFile, outDir) => { val dir = file.getParentFile - execTest(outDir, logFile) && diffCheck(compareOutput(dir, logFile)) + // adding code.jar to the classpath (to provide Code.lift services for reification tests) + execTest(outDir, logFile, PathSettings.srcCodeLib.toString) && + diffCheck(compareOutput(dir, logFile)) }) // Apache Ant 1.6 or newer diff --git a/src/partest/scala/tools/partest/utils/CodeTest.scala b/src/partest/scala/tools/partest/utils/CodeTest.scala deleted file mode 100644 index c236d89bbd..0000000000 --- a/src/partest/scala/tools/partest/utils/CodeTest.scala +++ /dev/null @@ -1,41 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala Parallel Testing ** -** / __/ __// _ | / / / _ | (c) 2007-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.tools.partest -package utils - -import scala.reflect.Code -import reflect.runtime.Mirror.ToolBox -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings - -/** Runner for testing code tree liftingg - */ -object CodeTest { - def static[T](code: () => T, args: Array[String] = Array()) = { - println("static: "+code()) - } - - def apply[T](code: Code[T], args: Array[String] = Array()) = { - println("testing: "+code.tree) - println("type is: 
"+code.manifest.tpe) - val isNullary = code.manifest.tpe.typeSymbol == scala.reflect.mirror.definitions.FunctionClass(0) - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter, args mkString " ") - val ttree = toolbox.typeCheck(code.tree, code.manifest.tpe) - println("result = " + toolbox.showAttributed(ttree, printTypes = true, printIds = false)) - var evaluated = toolbox.runExpr(ttree) - if (evaluated != null && isNullary) { - val applyMeth = evaluated.getClass.getMethod("apply") - evaluated = applyMeth.invoke(evaluated) - } - println("evaluated = "+evaluated) - evaluated - } -} diff --git a/test/files/codelib/code.jar.desired.sha1 b/test/files/codelib/code.jar.desired.sha1 new file mode 100644 index 0000000000..5e7acf3b90 --- /dev/null +++ b/test/files/codelib/code.jar.desired.sha1 @@ -0,0 +1 @@ +5880dd44ee9fedec44fed3f223842e42d8a63959 ?code.jar diff --git a/test/files/pos/t531.scala b/test/files/pos/t531.scala index 02763e08f1..856926de4f 100644 --- a/test/files/pos/t531.scala +++ b/test/files/pos/t531.scala @@ -2,9 +2,9 @@ object Test extends App { import scala.reflect._; def titi = { var truc = 0 - val tata: Code[()=>Unit] = () => { + val tata = Code.lift{() => { truc = 6 - } + }} () } } diff --git a/test/files/pos/t532.scala b/test/files/pos/t532.scala index 32649b1629..f864bbf45e 100644 --- a/test/files/pos/t532.scala +++ b/test/files/pos/t532.scala @@ -2,9 +2,9 @@ object Test extends App { import scala.reflect._; def titi: Unit = { var truc = 0 - val tata: Code[()=>Unit] = () => { + val tata = Code.lift{() => { truc = truc + 6 - } + }} () } } diff --git a/test/files/run/code.check b/test/files/run/code.check deleted file mode 100644 index 9b0351bbf9..0000000000 --- a/test/files/run/code.check +++ /dev/null @@ -1,36 +0,0 @@ -testing: ((x: Int) => x.$plus(ys.length)) -type is: Int => Int -result = ((x: Int) => x.+{(x: )Int}(ys.length{Int}){Int}){Int => Int} -evaluated = -testing: (() => { - val e: Element = new Element("someName"); - e -}) -type is: () => Element -result = (() => { - val e: Element = new Element{Element}{(name: )Element}("someName"{String("someName")}){Element}; - e{Element} -}{Element}){() => Element} -evaluated = Element(someName) -testing: (() => truc.elem = 6) -type is: () => Unit -result = (() => truc.elem{Int} = 6{Int(6)}{Unit}){() => Unit} -evaluated = null -testing: (() => truc.elem = truc.elem.$plus(6)) -type is: () => Unit -result = (() => truc.elem{Int} = truc.elem.+{(x: )Int}(6{Int(6)}){Int}{Unit}){() => Unit} -evaluated = null -testing: (() => new baz.BazElement("someName")) -type is: () => baz.BazElement -result = (() => new baz.BazElement{baz.BazElement}{(name: )baz.BazElement}("someName"{String("someName")}){baz.BazElement}){() => baz.BazElement} -evaluated = BazElement(someName) -testing: ((x: Int) => x.$plus(ys.length)) -type is: Int => Int -result = ((x: Int) => x.+{(x: )Int}(ys.length{Int}){Int}){Int => Int} -evaluated = -static: 2 -testing: (() => x.$plus(1)) -type is: () => Int -result = (() => x.+{(x: )Int}(1{Int(1)}){Int}){() => Int} -evaluated = 2 -1+1 = 2 diff --git a/test/files/run/code.scala b/test/files/run/code.scala deleted file mode 100644 index 162f796c63..0000000000 --- a/test/files/run/code.scala +++ /dev/null @@ -1,60 +0,0 @@ -import scala.tools.partest.utils.CodeTest - -case class Element(name: String) - -object Test extends App { - case class InnerElement(name: String) - def foo[T](ys: List[T]) = { - val fun: reflect.Code[Int => Int] = x => x + ys.length - fun - } - CodeTest(foo(List(2)), args) 
- CodeTest({() => val e = Element("someName"); e}, args) -// CodeTest({() => val e = InnerElement("someName"); e}, args) // (does not work yet) - def titi() = { - var truc = 0 - CodeTest(() => { - truc = 6 - }, args) - } - def tata(): Unit = { - var truc = 0 - CodeTest(() => { - truc = truc + 6 - }, args) - } - titi() - tata() - new baz.A(args) - - def show() { - def foo[T](ys: List[T]) = { - val fun: reflect.Code[Int => Int] = x => x + ys.length - CodeTest(fun, args) - } - foo(List(1, 2, 3)) - } - - show() - - def evaltest(x: Int) = { - CodeTest.static(() => x + 1, args) - CodeTest(() => x + 1, args) - } - - println("1+1 = "+evaltest(1)) -} - - -package baz { - - case class BazElement(name: String) { } - - class A(args: Array[String]) { - CodeTest(() => new baz.BazElement("someName"), args) - } - -} - - - diff --git a/test/files/run/programmatic-main.check b/test/files/run/programmatic-main.check index 4aeb3ab60c..6f253f5de1 100644 --- a/test/files/run/programmatic-main.check +++ b/test/files/run/programmatic-main.check @@ -7,23 +7,22 @@ superaccessors 5 add super accessors in traits and nested classes pickler 6 serialize symbol tables refchecks 7 reference/override checking, translate nested objects - liftcode 8 reify trees - uncurry 9 uncurry, translate function values to anonymous classes - tailcalls 10 replace tail calls by jumps - specialize 11 @specialized-driven class and method specialization - explicitouter 12 this refs to outer pointers, translate patterns - erasure 13 erase types, add interfaces for traits - lazyvals 14 allocate bitmaps, translate lazy vals into lazified defs - lambdalift 15 move nested functions to top level - constructors 16 move field definitions into constructors - flatten 17 eliminate inner classes - mixin 18 mixin composition - cleanup 19 platform-specific cleanups, generate reflective calls - icode 20 generate portable intermediate code - inliner 21 optimization: do inlining -inlineExceptionHandlers 22 optimization: inline exception handlers - closelim 23 optimization: eliminate uncalled closures - dce 24 optimization: eliminate dead code - jvm 25 generate JVM bytecode - terminal 26 The last phase in the compiler chain + uncurry 8 uncurry, translate function values to anonymous classes + tailcalls 9 replace tail calls by jumps + specialize 10 @specialized-driven class and method specialization + explicitouter 11 this refs to outer pointers, translate patterns + erasure 12 erase types, add interfaces for traits + lazyvals 13 allocate bitmaps, translate lazy vals into lazified defs + lambdalift 14 move nested functions to top level + constructors 15 move field definitions into constructors + flatten 16 eliminate inner classes + mixin 17 mixin composition + cleanup 18 platform-specific cleanups, generate reflective calls + icode 19 generate portable intermediate code + inliner 20 optimization: do inlining +inlineExceptionHandlers 21 optimization: inline exception handlers + closelim 22 optimization: eliminate uncalled closures + dce 23 optimization: eliminate dead code + jvm 24 generate JVM bytecode + terminal 25 The last phase in the compiler chain diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala index 825a38dc1d..960f6aec3e 100644 --- a/test/files/run/reify_closure1.scala +++ b/test/files/run/reify_closure1.scala @@ -4,9 +4,9 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { def foo[T](ys: List[T]): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { 
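+      // the lifted body is just the identity on x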
x - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala index b88bec005d..6c28514c2b 100644 --- a/test/files/run/reify_closure2a.scala +++ b/test/files/run/reify_closure2a.scala @@ -4,9 +4,9 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { def foo(y: Int): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala index 6414fa58a3..4444c55ddf 100644 --- a/test/files/run/reify_closure3a.scala +++ b/test/files/run/reify_closure3a.scala @@ -6,9 +6,9 @@ object Test extends App { def foo(y: Int): Int => Int = { def y1 = y - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y1 - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala index 99e9d82706..886e643a47 100644 --- a/test/files/run/reify_closure4a.scala +++ b/test/files/run/reify_closure4a.scala @@ -6,9 +6,9 @@ object Test extends App { def foo(y: Int): Int => Int = { val y1 = y - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y1 - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala index 0ac53d5479..20994abff0 100644 --- a/test/files/run/reify_closure5a.scala +++ b/test/files/run/reify_closure5a.scala @@ -4,9 +4,9 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { def foo[T](ys: List[T]): Int => Int = { - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + ys.length - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala index 54f1791bf2..192c08f701 100644 --- a/test/files/run/reify_closure6.scala +++ b/test/files/run/reify_closure6.scala @@ -7,13 +7,13 @@ object Test extends App { def foo[T](ys: List[T]): Int => Int = { val z = 1 var y = 0 - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { y += 1 q += 1 println("q = " + q) println("y = " + y) x + ys.length * z + q + y - } + }} val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala index 8933df23fa..942c2cda9c 100644 --- a/test/files/run/reify_closure7.scala +++ b/test/files/run/reify_closure7.scala @@ -8,13 +8,13 @@ object Test extends App { def foo[T](ys: List[T]): Int => Int = { val z = 1 var y = 0 - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { y += 1 q += 1 println("q = " + q) println("y = " + y) x + ys.length * z + q + y - } + }} if (clo == null) { val reporter = new ConsoleReporter(new Settings) diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala index 38ef72b6eb..44a25ae1b6 100644 --- a/test/files/run/reify_this.scala +++ b/test/files/run/reify_this.scala @@ -5,7 +5,7 @@ import scala.tools.nsc.Settings import reflect.runtime.Mirror.ToolBox trait Eval { - def eval(code: 
Code[_]): Any = eval(code.tree) + def eval(code: Code): Any = eval(code.tree) def eval(tree: Tree): Any = { val settings = new Settings diff --git a/test/files/run/t4875.check b/test/files/run/t4875.check deleted file mode 100644 index f7609d5ca5..0000000000 --- a/test/files/run/t4875.check +++ /dev/null @@ -1,17 +0,0 @@ -Type in expressions to have them evaluated. -Type :help for more information. - -scala> - -scala> import scala.reflect.Code -import scala.reflect.Code - -scala> def codeOf[A](code: Code[A]) = code -codeOf: [A](code: scala.reflect.Code[A])scala.reflect.Code[A] - -scala> codeOf((x: Iterable[_]) => throw new Exception) -res0: scala.reflect.Code[Iterable[_] => Nothing] = Code(tree = ((x: Iterable[Any]) => throw new scala.`package`.Exception()), manifest = scala.Function1[scala.collection.Iterable[Any], Nothing]) - -scala> - -scala> diff --git a/test/files/run/t4875.scala b/test/files/run/t4875.scala deleted file mode 100644 index c17211aede..0000000000 --- a/test/files/run/t4875.scala +++ /dev/null @@ -1,12 +0,0 @@ -import scala.tools.nsc.interpreter._ -import scala.tools.partest.ReplTest - -object Test extends ReplTest { - class M[@specialized T] { } - - def code = """ - |import scala.reflect.Code - |def codeOf[A](code: Code[A]) = code - |codeOf((x: Iterable[_]) => throw new Exception) - """.stripMargin -} diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala index e9fb40bede..a1fead07ae 100644 --- a/test/pending/run/reify_closure2b.scala +++ b/test/pending/run/reify_closure2b.scala @@ -5,9 +5,9 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { def foo(y: Int): Int => Int = { class Foo(y: Int) { - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y - } + }} } val reporter = new ConsoleReporter(new Settings) diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala index 5c4f3c81b9..acf07c4749 100644 --- a/test/pending/run/reify_closure3b.scala +++ b/test/pending/run/reify_closure3b.scala @@ -7,9 +7,9 @@ object Test extends App { class Foo(y: Int) { def y1 = y - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y1 - } + }} } val reporter = new ConsoleReporter(new Settings) diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala index 24dfa9fe17..ed102298c5 100644 --- a/test/pending/run/reify_closure4b.scala +++ b/test/pending/run/reify_closure4b.scala @@ -7,9 +7,9 @@ object Test extends App { class Foo(y: Int) { val y1 = y - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + y1 - } + }} } val reporter = new ConsoleReporter(new Settings) diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala index 02eb771f0c..29e911538f 100644 --- a/test/pending/run/reify_closure5b.scala +++ b/test/pending/run/reify_closure5b.scala @@ -5,9 +5,9 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { def foo[T](ys: List[T]): Int => Int = { class Foo[T](ys: List[T]) { - val fun: reflect.Code[Int => Int] = x => { + val fun = reflect.Code.lift{(x: Int) => { x + ys.length - } + }} } val reporter = new ConsoleReporter(new Settings) -- cgit v1.2.3 From f914350f8f5fecce7350a4c7e1a6c2fe447c7324 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 5 Feb 2012 14:26:42 +0100 Subject: Fixes https://issues.scala-lang.org/browse/SI-5272 --- src/compiler/scala/tools/nsc/ast/Reifiers.scala | 4 ++-- 
test/files/run/t5272_1.check | 1 + test/files/run/t5272_1.scala | 17 +++++++++++++++++ test/files/run/t5272_2.check | 1 + test/files/run/t5272_2.scala | 16 ++++++++++++++++ test/pending/run/t5272.check | 1 - test/pending/run/t5272.scala | 17 ----------------- 7 files changed, 37 insertions(+), 20 deletions(-) create mode 100644 test/files/run/t5272_1.check create mode 100644 test/files/run/t5272_1.scala create mode 100644 test/files/run/t5272_2.check create mode 100644 test/files/run/t5272_2.scala delete mode 100644 test/pending/run/t5272.check delete mode 100644 test/pending/run/t5272.scala diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala index 952110ade2..ef87925959 100644 --- a/src/compiler/scala/tools/nsc/ast/Reifiers.scala +++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala @@ -281,9 +281,9 @@ trait Reifiers { self: Global => private def reifyTree(tree: Tree): Tree = tree match { case EmptyTree => reifyMirrorObject(EmptyTree) - case This(_) if !(boundSyms contains tree.symbol) => + case This(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) => reifyFree(tree) - case Ident(_) if !(boundSyms contains tree.symbol) => + case Ident(_) if tree.symbol != NoSymbol && !(boundSyms contains tree.symbol) => if (tree.symbol.isVariable && tree.symbol.owner.isTerm) { captureVariable(tree.symbol) // Note order dependency: captureVariable needs to come before reifyTree here. mirrorCall("Select", reifyFree(tree), reifyName(nme.elem)) diff --git a/test/files/run/t5272_1.check b/test/files/run/t5272_1.check new file mode 100644 index 0000000000..9f8d6f24e7 --- /dev/null +++ b/test/files/run/t5272_1.check @@ -0,0 +1 @@ +okay \ No newline at end of file diff --git a/test/files/run/t5272_1.scala b/test/files/run/t5272_1.scala new file mode 100644 index 0000000000..3f44d05fb3 --- /dev/null +++ b/test/files/run/t5272_1.scala @@ -0,0 +1,17 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + 2 match { + case 2 => println("okay") + case _ => println("not okay") + } + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5272_2.check b/test/files/run/t5272_2.check new file mode 100644 index 0000000000..549f3f3af8 --- /dev/null +++ b/test/files/run/t5272_2.check @@ -0,0 +1 @@ +okay2 \ No newline at end of file diff --git a/test/files/run/t5272_2.scala b/test/files/run/t5272_2.scala new file mode 100644 index 0000000000..833ee65285 --- /dev/null +++ b/test/files/run/t5272_2.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + 2 match { + case x => println("okay" + x) + } + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/pending/run/t5272.check b/test/pending/run/t5272.check deleted file mode 100644 index dcf02b2fb6..0000000000 --- a/test/pending/run/t5272.check +++ /dev/null @@ -1 +0,0 @@ -okay diff --git a/test/pending/run/t5272.scala b/test/pending/run/t5272.scala deleted file mode 100644 index 3f44d05fb3..0000000000 --- a/test/pending/run/t5272.scala +++ /dev/null @@ -1,17 +0,0 @@ -import 
scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - 2 match { - case 2 => println("okay") - case _ => println("not okay") - } - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} -- cgit v1.2.3 From 75696bc3d310a53594d97853f38bb0dbfef42390 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Sun, 5 Feb 2012 16:56:48 +0100 Subject: Fixes https://issues.scala-lang.org/browse/SI-5334 New version of reification isn't susceptible to this bug. The problem was with Code.lift generating not only a tree, but also a manifest with the type of that tree. That led to an issue in the case of the type of the manifest depending on a class declared inside the quasiquote. Now manifests in reification are gone, so is the problem. --- test/files/run/t5334_1.check | 2 ++ test/files/run/t5334_1.scala | 16 ++++++++++++++++ test/files/run/t5334_2.check | 2 ++ test/files/run/t5334_2.scala | 16 ++++++++++++++++ test/pending/run/t5334_1.scala | 15 --------------- test/pending/run/t5334_2.scala | 15 --------------- 6 files changed, 36 insertions(+), 30 deletions(-) create mode 100644 test/files/run/t5334_1.check create mode 100644 test/files/run/t5334_1.scala create mode 100644 test/files/run/t5334_2.check create mode 100644 test/files/run/t5334_2.scala delete mode 100644 test/pending/run/t5334_1.scala delete mode 100644 test/pending/run/t5334_2.scala diff --git a/test/files/run/t5334_1.check b/test/files/run/t5334_1.check new file mode 100644 index 0000000000..e09aedaede --- /dev/null +++ b/test/files/run/t5334_1.check @@ -0,0 +1,2 @@ +C +C \ No newline at end of file diff --git a/test/files/run/t5334_1.scala b/test/files/run/t5334_1.scala new file mode 100644 index 0000000000..7acf282bb8 --- /dev/null +++ b/test/files/run/t5334_1.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { override def toString = "C" } + new C + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + println(ttree.tpe) + println(toolbox.runExpr(ttree)) +} diff --git a/test/files/run/t5334_2.check b/test/files/run/t5334_2.check new file mode 100644 index 0000000000..2ae76754c0 --- /dev/null +++ b/test/files/run/t5334_2.check @@ -0,0 +1,2 @@ +List[(C, C)] +List((C,C)) \ No newline at end of file diff --git a/test/files/run/t5334_2.scala b/test/files/run/t5334_2.scala new file mode 100644 index 0000000000..26f0778400 --- /dev/null +++ b/test/files/run/t5334_2.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { override def toString() = "C" } + List((new C, new C)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + println(ttree.tpe) + println(toolbox.runExpr(ttree)) +} diff --git a/test/pending/run/t5334_1.scala b/test/pending/run/t5334_1.scala deleted file mode 100644 index c1eba89c2b..0000000000 --- a/test/pending/run/t5334_1.scala +++ /dev/null @@ -1,15 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings 
-import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class C - new C - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5334_2.scala b/test/pending/run/t5334_2.scala deleted file mode 100644 index 361b8c85f2..0000000000 --- a/test/pending/run/t5334_2.scala +++ /dev/null @@ -1,15 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class C - List((new C, new C)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} -- cgit v1.2.3 From d8919f1cee1f705d0ba94f65ef41e98a2729558f Mon Sep 17 00:00:00 2001 From: Ruediger Klaehn Date: Sun, 5 Feb 2012 17:51:30 +0100 Subject: added reference equality checks to improve structural sharing added reference equality checks to updated0 and removed0 to prevent creation of a new map when updating an entry with the same value or removing an entry that was not present to begin with. --- src/library/scala/collection/immutable/HashMap.scala | 19 ++++++++++++------- 1 file changed, 12 insertions(+), 7 deletions(-) diff --git a/src/library/scala/collection/immutable/HashMap.scala b/src/library/scala/collection/immutable/HashMap.scala index 9cde20f1df..6b11371bec 100644 --- a/src/library/scala/collection/immutable/HashMap.scala +++ b/src/library/scala/collection/immutable/HashMap.scala @@ -138,8 +138,10 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { override def updated0[B1 >: B](key: A, hash: Int, level: Int, value: B1, kv: (A, B1), merger: Merger[B1]): HashMap[A, B1] = if (hash == this.hash && key == this.key ) { - if (merger eq null) new HashMap1(key, hash, value, kv) - else new HashMap1(key, hash, value, merger(this.kv, kv)) + if (merger eq null) { + if(this.value.asInstanceOf[AnyRef] eq value.asInstanceOf[AnyRef]) this + else new HashMap1(key, hash, value, kv) + } else new HashMap1(key, hash, value, merger(this.kv, kv)) } else { var thatindex = (hash >>> level) & 0x1f var thisindex = (this.hash >>> level) & 0x1f @@ -271,13 +273,15 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val mask = (1 << index) val offset = Integer.bitCount(bitmap & (mask-1)) if ((bitmap & mask) != 0) { - val elemsNew = new Array[HashMap[A,B1]](elems.length) - Array.copy(elems, 0, elemsNew, 0, elems.length) val sub = elems(offset) // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.updated0(key, hash, level + 5, value, kv, merger) - elemsNew(offset) = subNew - new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + if(subNew eq sub) this else { + val elemsNew = new Array[HashMap[A,B1]](elems.length) + Array.copy(elems, 0, elemsNew, 0, elems.length) + elemsNew(offset) = subNew + new HashTrieMap(bitmap, elemsNew, size + (subNew.size - sub.size)) + } } else { val elemsNew = new Array[HashMap[A,B1]](elems.length + 1) Array.copy(elems, 0, elemsNew, 0, offset) @@ -295,7 +299,8 @@ object HashMap extends ImmutableMapFactory[HashMap] with BitOperations.Int { val sub = elems(offset) // TODO: might be worth checking if sub is HashTrieMap (-> monomorphic call site) val subNew = sub.removed0(key, hash, level + 5) - if (subNew.isEmpty) { + 
if (subNew eq sub) this + else if (subNew.isEmpty) { val bitmapNew = bitmap ^ mask if (bitmapNew != 0) { val elemsNew = new Array[HashMap[A,B]](elems.length - 1) -- cgit v1.2.3 From e34098b7f6e37420198fa5c7c2820d0443b46cc4 Mon Sep 17 00:00:00 2001 From: Vlad Ureche Date: Sun, 5 Feb 2012 19:32:52 +0100 Subject: Added a rootdoc page for the compiler API scaladoc --- build.xml | 5 ++-- src/compiler/rootdoc.txt | 6 +++++ .../scala/tools/nsc/doc/html/HtmlFactory.scala | 4 +--- .../tools/nsc/doc/html/resource/lib/rootdoc.txt | 27 ---------------------- src/library/rootdoc.txt | 27 ++++++++++++++++++++++ 5 files changed, 37 insertions(+), 32 deletions(-) create mode 100644 src/compiler/rootdoc.txt delete mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt create mode 100644 src/library/rootdoc.txt diff --git a/build.xml b/build.xml index 57d2eed1c0..e65b66219b 100644 --- a/build.xml +++ b/build.xml @@ -1530,7 +1530,7 @@ DOCUMENTATION docUncompilable="${src.dir}/library-aux" sourcepath="${src.dir}" classpathref="pack.classpath" - docRootContent="${build-docs.dir}/library/lib/rootdoc.txt"> + docRootContent="${src.dir}/library/rootdoc.txt"> @@ -1613,7 +1613,8 @@ DOCUMENTATION docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1" sourcepath="${src.dir}" classpathref="pack.classpath" - srcdir="${src.dir}/compiler"> + srcdir="${src.dir}/compiler" + docRootContent="${src.dir}/compiler/rootdoc.txt"> diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt new file mode 100644 index 0000000000..173f604098 --- /dev/null +++ b/src/compiler/rootdoc.txt @@ -0,0 +1,6 @@ +The Scala compiler API. + +The following resources are useful for Scala plugin/compiler development: + - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]] + - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]] + - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala index c21507ef45..4f05678d85 100644 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -80,9 +80,7 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) { "selected.png", "selected2-right.png", "selected2.png", - "unselected.png", - - "rootdoc.txt" + "unselected.png" ) /** Generates the Scaladoc site for a model into the site root. diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt deleted file mode 100644 index 6145429f1e..0000000000 --- a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt +++ /dev/null @@ -1,27 +0,0 @@ -This is the documentation for the Scala standard library. - -== Package structure == - -The [[scala]] package contains core types. - -scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation. - -Other important packages include: - - - scala.[[scala.actors]] - Concurrency framework inspired by Erlang. - - scala.[[scala.io]] - Input and output. 
- - scala.[[scala.math]] - Basic math functions and additional numeric types. - - scala.[[scala.sys]] - Interaction with other processes and the operating system. - - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions. - - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing. - - scala.[[scala.xml]] - XML parsing, manipulation, and serialization. - -Many other packages exist. See the complete list on the left. - -== Automatic imports == - -Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default. - -Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]]. - -Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String. diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt new file mode 100644 index 0000000000..6145429f1e --- /dev/null +++ b/src/library/rootdoc.txt @@ -0,0 +1,27 @@ +This is the documentation for the Scala standard library. + +== Package structure == + +The [[scala]] package contains core types. + +scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation. + +Other important packages include: + + - scala.[[scala.actors]] - Concurrency framework inspired by Erlang. + - scala.[[scala.io]] - Input and output. + - scala.[[scala.math]] - Basic math functions and additional numeric types. + - scala.[[scala.sys]] - Interaction with other processes and the operating system. + - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions. + - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing. + - scala.[[scala.xml]] - XML parsing, manipulation, and serialization. + +Many other packages exist. See the complete list on the left. + +== Automatic imports == + +Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default. + +Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]]. + +Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String. -- cgit v1.2.3 From 7946ac410ad74894cd0eb6dfd29447f173911b99 Mon Sep 17 00:00:00 2001 From: Vlad Ureche Date: Sun, 5 Feb 2012 22:00:49 +0100 Subject: Scaladoc @usecase annotation overriding / SI-5287 From now on, the usecases inherit the comments from their parents, such as the explanation and the annotations: @param, @tparam, @return, etc. An example of usecase comment inheritance is: /** * The test function tests the parameter param for ... * * @param theParam the implicit parameter to be tested for ... * @return the result of the test * * * * @usecase def test(): Bool * * The test function tests the parameter taken implicitly from scope. * Example: `test()` * * @return the result of the test for the current scope * * * * @usecase def test(theParam: SomeType): Bool * * This takes the explicit value passed. 
* Example: `test(3)` * * @param theParam the explicit parameter to be tested for ... */ def test(implicit theParam: SomeType): Bool Notice both usecases override the explanation with their own examples. The first usecase also overrides the "@return" annotation while the 2nd usecase overrides the "@param theParam" annotation. If they didn't override the explanations and annotations, they would inherit the values from the actual implementation, def test(implicit ...) This will be followed by @inheritdoc, which enables more fine-grained control over comment inheritance. The full explanation of using comment inheritance and @inheritdoc and their interaction with variables is given at https://wiki.scala-lang.org/display/SW/Tags+and+Annotations in the "Comment inheritance" and "Inheritance Example" sections. --- src/compiler/scala/tools/nsc/util/DocStrings.scala | 26 ++- .../resources/implicit-inheritance-override.scala | 41 +++++ .../resources/implicit-inheritance-usecase.scala | 57 +++++++ test/scaladoc/scala/html/HtmlFactoryTest.scala | 189 +++++++++++++++++---- 4 files changed, 277 insertions(+), 36 deletions(-) create mode 100644 test/scaladoc/resources/implicit-inheritance-override.scala create mode 100644 test/scaladoc/resources/implicit-inheritance-usecase.scala diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index 1db6c38b4d..2c8b77be71 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -71,13 +71,35 @@ object DocStrings { * Every section starts with a `@` and extends to the next `@`, or * to the end of the comment string, but excluding the final two * characters which terminate the comment. + * + * Also take usecases into account - they need to expand until the next + * @usecase or the end of the string, as they might include other sections + * of their own */ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match { case List() => List() - case idxs => idxs zip (idxs.tail ::: List(str.length - 2)) + case idxs => { + val idxs2 = mergeUsecaseSections(str, idxs) + idxs2 zip (idxs2.tail ::: List(str.length - 2)) + } } - + + /** + * Merge sections following an @usecase into the usecase comment, so they + * can override the parent symbol's sections + */ + def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = { + idxs.find(str.substring(_).startsWith("@usecase")) match { + case Some(firstUC) => + val commentSections = idxs.take(idxs.indexOf(firstUC)) + val usecaseSections = idxs.drop(idxs.indexOf(firstUC)).filter(str.substring(_).startsWith("@usecase")) + commentSections ::: usecaseSections + case None => + idxs + } + } + /** Does interval `iv` start with given `tag`? */ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean = diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala new file mode 100644 index 0000000000..85b8e8d543 --- /dev/null +++ b/test/scaladoc/resources/implicit-inheritance-override.scala @@ -0,0 +1,41 @@ +// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc) +class Base { + /** + * The base comment. And another sentence... 
+ * + * @param arg1 The T term comment + * @param arg2 The string comment + * @tparam T the type of the first argument + * @return The return comment + */ + def function[T](arg1: T, arg2: String): Double = 0.0d +} + +class DerivedA extends Base { + /** + * Overriding the comment, the params and returns comments should stay the same. + */ + override def function[T](arg1: T, arg2: String): Double = 1.0d +} + +class DerivedB extends Base { + /** + * @param arg1 The overridden T term comment + * @param arg2 The overridden string comment + */ + override def function[T](arg1: T, arg2: String): Double = 2.0d +} + +class DerivedC extends Base { + /** + * @return The overridden return comment + */ + override def function[T](arg1: T, arg2: String): Double = 3.0d +} + +class DerivedD extends Base { + /** + * @tparam T The overriden type parameter comment + */ + override def function[T](arg1: T, arg2: String): Double = 3.0d +} \ No newline at end of file diff --git a/test/scaladoc/resources/implicit-inheritance-usecase.scala b/test/scaladoc/resources/implicit-inheritance-usecase.scala new file mode 100644 index 0000000000..8dd1262e4b --- /dev/null +++ b/test/scaladoc/resources/implicit-inheritance-usecase.scala @@ -0,0 +1,57 @@ +// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc) +/** Testing use case inheritance */ +class UseCaseInheritance { + /** + * The base comment. And another sentence... + * + * @param arg1 The T term comment + * @param arg2 The string comment + * @tparam T The type parameter + * @return The return comment + * + * @usecase def missing_arg[T](arg1: T): Double + * + * @usecase def missing_targ(arg1: Int, arg2: String): Double + * + * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double + * @param arg1 The overridden T term comment + * + * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double + * @tparam T The overridden type parameter comment + * + * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double + * @return The overridden return comment + * + * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double + * @param arg3 The added float comment + * + * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double + * The overridden comment. + */ + def function[T](implicit arg1: T, arg2: String): Double = 0.0d +} + +/** Testing the override-use case interaction */ +class UseCaseOverrideInheritance extends UseCaseInheritance { + /** + * @usecase def missing_arg[T](arg1: T): Double + * + * @usecase def missing_targ(arg1: Int, arg2: String): Double + * + * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double + * @param arg1 The overridden T term comment + * + * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double + * @tparam T The overridden type parameter comment + * + * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double + * @return The overridden return comment + * + * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double + * @param arg3 The added float comment + * + * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double + * The overridden comment. 
+ */ + override def function[T](implicit arg1: T, arg2: String): Double = 0.0d +} diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala index e2687dd510..37aa302ac7 100644 --- a/test/scaladoc/scala/html/HtmlFactoryTest.scala +++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala @@ -84,12 +84,7 @@ object Test extends Properties("HtmlFactory") { val html = scala.stripSuffix(".scala") + ".html" createTemplates(scala)(html) } - - /** - * See checkTextOnly(scalaFile: String, checks: List[String]) - */ - def checkText1(scalaFile: String, check: String, debug: Boolean = true): Boolean = checkText(scalaFile, List(check), debug) - + /** * This tests the text without the markup - ex: * @@ -111,20 +106,31 @@ object Test extends Properties("HtmlFactory") { * * NOTE: Comparison is done ignoring all whitespace */ - def checkText(scalaFile: String, checks: List[String], debug: Boolean = true): Boolean = { + def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = { val htmlFile = scalaFile.stripSuffix(".scala") + ".html" - val htmlText = createTemplates(scalaFile)(htmlFile).text.replace('→',' ').replaceAll("\\s+","") + val htmlAllFiles = createTemplates(scalaFile) var result = true - for (check <- checks) { - val checkText = check.replace('→',' ').replaceAll("\\s+","") - val checkValue = htmlText.contains(checkText) - if (debug && (!checkValue)) { - Console.err.println("Check failed: ") - Console.err.println("HTML: " + htmlText) - Console.err.println("Check: " + checkText) - } - result &&= checkValue + for ((fileHint, check, expected) <- checks) { + // resolve the file to be checked + val fileName = fileHint match { + case Some(file) => + if (file endsWith ".html") + file + else + file + ".html" + case None => + htmlFile + } + val fileText = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+","") + val checkText = check.replace('→',' ').replaceAll("\\s+","") + val checkValue = fileText.contains(checkText) == expected + if (debug && (!checkValue)) { + Console.err.println("Check failed: ") + Console.err.println("HTML: " + fileText) + Console.err.println("Check: " + checkText) + } + result &&= checkValue } result @@ -426,40 +432,155 @@ object Test extends Properties("HtmlFactory") { createTemplate("SI_4898.scala") true } - + property("Use cases should override their original members") = - checkText1("SI_5054_q1.scala", """def test(): Int""") && - !checkText1("SI_5054_q1.scala", """def test(implicit lost: Int): Int""") - + checkText("SI_5054_q1.scala")( + (None,"""def test(): Int""", true), + (None,"""def test(implicit lost: Int): Int""", false) + ) property("Use cases should keep their flags - final should not be lost") = - checkText1("SI_5054_q2.scala", """final def test(): Int""") + checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true)) property("Use cases should keep their flags - implicit should not be lost") = - checkText1("SI_5054_q3.scala", """implicit def test(): Int""") - + checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true)) + property("Use cases should keep their flags - real abstract should not be lost") = - checkText1("SI_5054_q4.scala", """abstract def test(): Int""") + checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true)) property("Use cases should keep their flags - traits should not be affected") = - checkText1("SI_5054_q5.scala", """def test(): Int""") + checkText("SI_5054_q5.scala")((None, """def test(): 
Int""", true)) property("Use cases should keep their flags - traits should not be affected") = - checkText1("SI_5054_q6.scala", """abstract def test(): Int""") + checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true)) property("Use case individual signature test") = - checkText("SI_5054_q7.scala", List( - """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", - """abstract def test1(): Int [use case] This takes the implicit value in scope.""")) + checkText("SI_5054_q7.scala")( + (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true), + (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true) + ) property("Display correct \"Definition classes\"") = - checkText1("SI_5287.scala", - """def method(): Int + checkText("SI_5287.scala")( + (None, + """def method(): Int [use case] The usecase explanation [use case] The usecase explanation - Definition Classes SI_5287 SI_5287_B SI_5287_A""", debug=true) - // explanation appears twice, as small comment and full comment + Definition Classes SI_5287 SI_5287_B SI_5287_A""", true) + ) // the explanation appears twice, as small comment and full comment + + property("Correct comment inheritance for overriding") = + checkText("implicit-inheritance-override.scala")( + (Some("Base"), + """def function[T](arg1: T, arg2: String): Double + The base comment. + The base comment. And another sentence... + T the type of the first argument + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true), + (Some("DerivedA"), + """def function[T](arg1: T, arg2: String): Double + Overriding the comment, the params and returns comments should stay the same. + Overriding the comment, the params and returns comments should stay the same. 
+ T the type of the first argument + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true), + (Some("DerivedB"), + """def function[T](arg1: T, arg2: String): Double + T the type of the first argument + arg1 The overridden T term comment + arg2 The overridden string comment + returns The return comment + """, true), + (Some("DerivedC"), + """def function[T](arg1: T, arg2: String): Double + T the type of the first argument + arg1 The T term comment + arg2 The string comment + returns The overridden return comment + """, true), + (Some("DerivedD"), + """def function[T](arg1: T, arg2: String): Double + T The overriden type parameter comment + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true) + ) + + for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) { + property("Correct comment inheritance for usecases") = + checkText("implicit-inheritance-usecase.scala")( + (Some(useCaseFile), + """def missing_arg[T](arg1: T): Double + [use case] + [use case] + T The type parameter + arg1 The T term comment + returns The return comment + """, true), + (Some(useCaseFile), + """def missing_targ(arg1: Int, arg2: String): Double + [use case] + [use case] + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true), + (Some(useCaseFile), + """def overridden_arg1[T](implicit arg1: T, arg2: String): Double + [use case] + [use case] + T The type parameter + arg1 The overridden T term comment + arg2 The string comment + returns The return comment + """, true), + (Some(useCaseFile), + """def overridden_targ[T](implicit arg1: T, arg2: String): Double + [use case] + [use case] + T The overridden type parameter comment + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true), + (Some(useCaseFile), + """def overridden_return[T](implicit arg1: T, arg2: String): Double + [use case] + [use case] + T The type parameter + arg1 The T term comment + arg2 The string comment + returns The overridden return comment + """, true), + (Some(useCaseFile), + """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double + [use case] + [use case] + T The type parameter + arg1 The T term comment + arg2 The string comment + arg3 The added float comment + returns The return comment + """, true), + (Some(useCaseFile), + """def overridden_comment[T](implicit arg1: T, arg2: String): Double + [use case] The overridden comment. + [use case] The overridden comment. + T The type parameter + arg1 The T term comment + arg2 The string comment + returns The return comment + """, true) + ) + } + { val files = createTemplates("basic.scala") //println(files) -- cgit v1.2.3 From 25c6d0a8bc23da696d76dd99ac670adb6eece2c3 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 5 Feb 2012 19:15:39 -0800 Subject: Reimplemented Modifer. Couldn't live with a scala.Enumeration being a permanent fixture in the reflection library. Rolled it by hand. 
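
For illustration, a minimal usage sketch (hypothetical, not part of this patch) of the hand-rolled scala.reflect.api.Modifier introduced in the diff below; the values are plain singletons rather than Enumeration values, so ordinary Set operations keep working:

    import scala.reflect.api.Modifier

    object ModifierDemo {
      def main(args: Array[String]): Unit = {
        // Source and symbol modifiers share the sealed Modifier base class,
        // so they can be mixed freely in a Set[Modifier].
        val mods: Set[Modifier] = Set(Modifier.`private`, Modifier.`lazy`, Modifier.caseAccessor)

        // sourceString backquotes keyword-like names: `private`, `lazy`, caseAccessor
        println(mods.map(_.sourceString).mkString(", "))

        // The Modifier companion is itself an immutable Set of all known modifiers.
        println(Modifier contains Modifier.caseAccessor) // true
      }
    }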
--- src/compiler/scala/reflect/internal/Flags.scala | 8 +-- src/compiler/scala/reflect/internal/Symbols.scala | 4 +- src/compiler/scala/reflect/internal/Trees.scala | 6 +- .../scala/tools/nsc/ast/ReifyPrinters.scala | 14 +--- src/library/scala/reflect/api/Modifier.scala | 83 ++++++++++++++++++++-- src/library/scala/reflect/api/Symbols.scala | 4 +- src/library/scala/reflect/api/TreePrinters.scala | 13 +--- src/library/scala/reflect/api/Trees.scala | 6 +- 8 files changed, 94 insertions(+), 44 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala index 66af92be5f..aa696bc6e8 100644 --- a/src/compiler/scala/reflect/internal/Flags.scala +++ b/src/compiler/scala/reflect/internal/Flags.scala @@ -466,7 +466,7 @@ class Flags extends ModifierFlags { } protected final val rawFlagPickledOrder: Array[Long] = pickledListOrder.toArray - def flagOfModifier(mod: Modifier.Value): Long = mod match { + def flagOfModifier(mod: Modifier): Long = mod match { case Modifier.`protected` => PROTECTED case Modifier.`private` => PRIVATE case Modifier.`override` => OVERRIDE @@ -496,13 +496,13 @@ class Flags extends ModifierFlags { case Modifier.bynameParameter => BYNAMEPARAM } - def flagsOfModifiers(mods: List[Modifier.Value]): Long = + def flagsOfModifiers(mods: List[Modifier]): Long = (mods :\ 0L) { (mod, curr) => curr | flagOfModifier(mod) } - def modifierOfFlag(flag: Long): Option[Modifier.Value] = + def modifierOfFlag(flag: Long): Option[Modifier] = Modifier.values find { mod => flagOfModifier(mod) == flag } - def modifiersOfFlags(flags: Long): List[Modifier.Value] = + def modifiersOfFlags(flags: Long): List[Modifier] = pickledListOrder map (mask => modifierOfFlag(flags & mask)) flatMap { mod => mod } } diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index e777491300..b9ba269ee3 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -103,12 +103,12 @@ trait Symbols extends api.Symbols { self: SymbolTable => def pos = rawpos def setPos(pos: Position): this.type = { this.rawpos = pos; this } - override def hasModifier(mod: Modifier.Value) = + override def hasModifier(mod: Modifier) = hasFlag(flagOfModifier(mod)) && (!(mod == Modifier.bynameParameter) || isTerm) && (!(mod == Modifier.covariant) || isType) - override def allModifiers: Set[Modifier.Value] = + override def allModifiers: Set[Modifier] = Modifier.values filter hasModifier // ------ creators ------------------------------------------------------------------- diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index 076a7722ae..f982c93656 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -72,9 +72,9 @@ trait Trees extends api.Trees { self: SymbolTable => def withPosition(flag: Long, position: Position) = copy() setPositions positions + (flag -> position) - override def hasModifier(mod: Modifier.Value) = + override def hasModifier(mod: Modifier) = hasFlag(flagOfModifier(mod)) - override def allModifiers: Set[Modifier.Value] = + override def allModifiers: Set[Modifier] = Modifier.values filter hasModifier override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = Modifiers(flags, privateWithin, f(annotations)) setPositions positions @@ -85,7 +85,7 @@ trait Trees extends api.Trees { self: SymbolTable => def Modifiers(flags: 
Long, privateWithin: Name): Modifiers = Modifiers(flags, privateWithin, List()) def Modifiers(flags: Long): Modifiers = Modifiers(flags, tpnme.EMPTY) - def Modifiers(mods: Set[Modifier.Value], + def Modifiers(mods: Set[Modifier], privateWithin: Name, annotations: List[Tree]): Modifiers = { val flagSet = mods map flagOfModifier diff --git a/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala index aebde18703..98135fadda 100644 --- a/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala +++ b/src/compiler/scala/tools/nsc/ast/ReifyPrinters.scala @@ -18,16 +18,6 @@ trait ReifyPrinters { self: NodePrinters => object reifiedNodeToString extends Function1[Tree, String] { def apply(tree: Tree): String = { import scala.reflect.api.Modifier - import scala.reflect.api.Modifier._ - - def copypasteModifier(mod: Modifier.Value): String = mod match { - case mod @ ( - `protected` | `private` | `override` | - `abstract` | `final` | `sealed` | - `implicit` | `lazy` | `macro` | - `case` | `trait`) => "`" + mod.toString + "`" - case mod => mod.toString - } // @PP: I fervently hope this is a test case or something, not anything being // depended upon. Of more fragile code I cannot conceive. @@ -47,7 +37,7 @@ trait ReifyPrinters { self: NodePrinters => val buf = new StringBuilder val flags = m.group(1).toLong - val s_flags = Flags.modifiersOfFlags(flags) map copypasteModifier mkString ", " + val s_flags = Flags.modifiersOfFlags(flags) map (_.sourceString) mkString ", " if (s_flags != "") buf.append("Set(" + s_flags + ")") @@ -63,7 +53,7 @@ trait ReifyPrinters { self: NodePrinters => }) s = """setInternalFlags\((\d+)L\)""".r.replaceAllIn(s, m => { val flags = m.group(1).toLong - val mods = Flags.modifiersOfFlags(flags) map copypasteModifier + val mods = Flags.modifiersOfFlags(flags) map (_.sourceString) "setInternalFlags(flagsOfModifiers(List(" + mods.mkString(", ") + ")))" }) diff --git a/src/library/scala/reflect/api/Modifier.scala b/src/library/scala/reflect/api/Modifier.scala index 8569b103cf..c0123ed955 100644 --- a/src/library/scala/reflect/api/Modifier.scala +++ b/src/library/scala/reflect/api/Modifier.scala @@ -1,11 +1,82 @@ package scala.reflect.api -object Modifier extends Enumeration { +import collection.{ immutable, mutable } - val `protected`, `private`, `override`, `abstract`, `final`, - `sealed`, `implicit`, `lazy`, `macro`, `case`, `trait`, - deferred, interface, mutable, parameter, covariant, contravariant, - preSuper, abstractOverride, local, java, static, caseAccessor, - defaultParameter, defaultInit, paramAccessor, bynameParameter = Value +sealed abstract class Modifier { + def name: String + def isKeyword: Boolean + def sourceString: String = if (isKeyword) "`" + name + "`" else name + override def equals(that: Any) = this eq that.asInstanceOf[AnyRef] + override def hashCode = name.hashCode + override def toString = name +} +final class SymbolModifier private (val name: String, val isKeyword: Boolean) extends Modifier { + def this(name: String) = this(name, false) +} +final class SourceModifier private (val name: String) extends Modifier { + def isKeyword = true +} + +object SymbolModifier { + private val seen = mutable.ListBuffer[SymbolModifier]() + private[api] def apply(name: String): SymbolModifier = { + val mod = name match { + case "case" | "trait" => new SymbolModifier(name, isKeyword = true) + case _ => new SymbolModifier(name) + } + seen += mod + mod + } + private[api] def all = seen.toList +} +object SourceModifier { + private 
val seen = mutable.ListBuffer[SourceModifier]() + private[api] def apply(name: String): SourceModifier = { + val mod = new SourceModifier(name) + seen += mod + mod + } + private[api] def all = seen.toList +} + +object Modifier extends immutable.Set[Modifier] { + val `abstract` = SourceModifier("abstract") + val `final` = SourceModifier("final") + val `implicit` = SourceModifier("implicit") + val `lazy` = SourceModifier("lazy") + val `macro` = SourceModifier("macro") + val `override` = SourceModifier("override") + val `private` = SourceModifier("private") + val `protected` = SourceModifier("protected") + val `sealed` = SourceModifier("sealed") + + val `case` = SymbolModifier("case") + val `trait` = SymbolModifier("trait") + val abstractOverride = SymbolModifier("abstractOverride") + val bynameParameter = SymbolModifier("bynameParameter") + val caseAccessor = SymbolModifier("caseAccessor") + val contravariant = SymbolModifier("contravariant") + val covariant = SymbolModifier("covariant") + val defaultInit = SymbolModifier("defaultInit") + val defaultParameter = SymbolModifier("defaultParameter") + val deferred = SymbolModifier("deferred") + val interface = SymbolModifier("interface") + val java = SymbolModifier("java") + val local = SymbolModifier("local") + val mutable = SymbolModifier("mutable") + val paramAccessor = SymbolModifier("paramAccessor") + val parameter = SymbolModifier("parameter") + val preSuper = SymbolModifier("preSuper") + val static = SymbolModifier("static") + + val sourceModifiers: Set[SourceModifier] = SourceModifier.all.toSet + val symbolModifiers: Set[SymbolModifier] = SymbolModifier.all.toSet + val allModifiers: Set[Modifier] = sourceModifiers ++ symbolModifiers + def values = allModifiers + + def contains(key: Modifier) = allModifiers(key) + def iterator = allModifiers.iterator + def -(elem: Modifier) = allModifiers - elem + def +(elem: Modifier) = allModifiers + elem } diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala index 17d9b06324..65a3680fdd 100755 --- a/src/library/scala/reflect/api/Symbols.scala +++ b/src/library/scala/reflect/api/Symbols.scala @@ -9,11 +9,11 @@ trait Symbols { self: Universe => /** The modifiers of this symbol */ - def allModifiers: Set[Modifier.Value] + def allModifiers: Set[Modifier] /** Does this symbol have given modifier? */ - def hasModifier(mod: Modifier.Value): Boolean + def hasModifier(mod: Modifier): Boolean /** The owner of this symbol. This is the symbol * that directly contains the current symbol's definition. diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala index 88ef450ed9..70a100015b 100644 --- a/src/library/scala/reflect/api/TreePrinters.scala +++ b/src/library/scala/reflect/api/TreePrinters.scala @@ -31,17 +31,6 @@ trait TreePrinters { self: Universe => // emits more or less verbatim representation of the provided tree // todo. 
when LiftCode becomes a macro, throw this code away and use that macro class RawTreePrinter(out: PrintWriter) extends TreePrinter { - import scala.reflect.api.Modifier - import scala.reflect.api.Modifier._ - - def copypasteModifier(mod: Modifier.Value): String = mod match { - case mod @ ( - `protected` | `private` | `override` | - `abstract` | `final` | `sealed` | - `implicit` | `lazy` | `macro` | - `case` | `trait`) => "`" + mod.toString + "`" - case mod => mod.toString - } def print(args: Any*): Unit = args foreach { case EmptyTree => @@ -77,7 +66,7 @@ trait TreePrinters { self: Universe => print(")") case mods: Modifiers => val parts = collection.mutable.ListBuffer[String]() - parts += "Set(" + mods.allModifiers.map{copypasteModifier}.mkString(", ") + ")" + parts += "Set(" + mods.allModifiers.map(_.sourceString).mkString(", ") + ")" parts += "newTypeName(\"" + mods.privateWithin.toString + "\")" parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")" diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index b8b32477dd..3b48a02949 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -16,14 +16,14 @@ trait Trees { self: Universe => type Modifiers <: AbsModifiers abstract class AbsModifiers { - def hasModifier(mod: Modifier.Value): Boolean - def allModifiers: Set[Modifier.Value] + def hasModifier(mod: Modifier): Boolean + def allModifiers: Set[Modifier] def privateWithin: Name // default: EmptyTypeName def annotations: List[Tree] // default: List() def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers } - def Modifiers(mods: Set[Modifier.Value] = Set(), + def Modifiers(mods: Set[Modifier] = Set(), privateWithin: Name = EmptyTypeName, annotations: List[Tree] = List()): Modifiers -- cgit v1.2.3 From 9d00ea8d389f4426f1f644e0a7f48e9ea380e9fc Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 5 Feb 2012 09:49:30 -0800 Subject: Refining the reflection api. In the pursuit of simplicity and consistency. - Method names like getType, getClass, and getValue are far too ambiguous, both internally and especially with java reflection names. Methods which accept or return scala symbols should not refer to them as "classes" in the reflection library. (We can live with the FooClass convention for naming the well-known symbols, it's names like "getClass" and "classToType" which are needlessly conflationary.) - Meaningless names like "subst" have to be expanded. - We should hew closely to the terms which are used by scala programmers wherever possible, thus using "thisType" to mean "C.this" can only beget confusion, given that "thisType" doesn't mean "this.type" but what is normally called the "self type." - It's either "enclosing" or "encl", not both, and similar consistency issues. - Eliminated getAnnotations. - Removed what I could get away with from the API; would like to push those which are presently justified as being "required for LiftCode" out of the core. - Changed a number of AnyRefs to Any both on general principles and because before long it may actually matter. - There are !!!s scattered all over this commit, mostly where I think the name could be better. - I think we should standardize on method names like "vmSignature, vmClass" etc. when we are talking about jvm (and ostensibly other vm) things. There are a bunch more places to make this distinction clear (e.g. Symbol's javaBinaryName, etc.) 
- There is a lot more I want to do on this and I don't know where the time will come from to do it. Review by @odersky, @scalamacros. --- .../scala/reflect/internal/Definitions.scala | 3 + src/compiler/scala/reflect/internal/Names.scala | 7 ++ src/compiler/scala/reflect/internal/StdNames.scala | 2 +- src/compiler/scala/reflect/internal/Symbols.scala | 37 +++++--- src/compiler/scala/reflect/internal/Trees.scala | 2 +- src/compiler/scala/reflect/internal/Types.scala | 1 + src/compiler/scala/reflect/runtime/Mirror.scala | 14 +-- .../scala/reflect/runtime/TreeBuildUtil.scala | 3 +- src/compiler/scala/tools/nsc/Global.scala | 2 +- src/compiler/scala/tools/nsc/ast/Reifiers.scala | 8 +- .../nsc/symtab/classfile/ClassfileParser.scala | 8 +- .../scala/tools/nsc/transform/Flatten.scala | 6 +- .../scala/tools/nsc/typechecker/Infer.scala | 4 +- .../scala/tools/nsc/typechecker/Macros.scala | 6 +- .../scala/tools/nsc/typechecker/Typers.scala | 2 +- src/library/scala/reflect/Manifest.scala | 2 +- src/library/scala/reflect/api/Mirror.scala | 32 ++++--- src/library/scala/reflect/api/Names.scala | 13 ++- .../scala/reflect/api/StandardDefinitions.scala | 19 ++-- src/library/scala/reflect/api/Symbols.scala | 101 ++++++++++----------- src/library/scala/reflect/api/TreeBuildUtil.scala | 14 +-- src/library/scala/reflect/api/TreePrinters.scala | 5 +- src/library/scala/reflect/api/Trees.scala | 10 +- src/library/scala/reflect/api/Types.scala | 22 ++--- src/library/scala/reflect/macro/Context.scala | 2 +- src/scalap/scala/tools/scalap/Classfiles.scala | 26 ------ test/files/run/reflection-implClass.scala | 16 ++-- test/files/run/t5423.scala | 4 +- 28 files changed, 178 insertions(+), 193 deletions(-) diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 6871822562..e05ac1087b 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -605,6 +605,9 @@ trait Definitions extends reflect.api.StandardDefinitions { def ClassType(arg: Type) = if (phase.erasedTypes || forMSIL) ClassClass.tpe else appliedType(ClassClass.typeConstructor, List(arg)) + + def vmClassType(arg: Type): Type = ClassType(arg) + def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! // // .NET backend diff --git a/src/compiler/scala/reflect/internal/Names.scala b/src/compiler/scala/reflect/internal/Names.scala index 907b564d4c..e6ca4c49ba 100644 --- a/src/compiler/scala/reflect/internal/Names.scala +++ b/src/compiler/scala/reflect/internal/Names.scala @@ -387,6 +387,13 @@ trait Names extends api.Names { * decode returns a String. */ + /** !!! Duplicative but consistently named. + */ + def decoded: String = decode + def encoded: String = "" + encode + // def decodedName: ThisNameType = newName(decoded) + def encodedName: ThisNameType = encode + /** Replace operator symbols by corresponding $op_name. 
*/ def encode: ThisNameType = { val str = toString diff --git a/src/compiler/scala/reflect/internal/StdNames.scala b/src/compiler/scala/reflect/internal/StdNames.scala index 045daa7eb1..a072a2eebe 100644 --- a/src/compiler/scala/reflect/internal/StdNames.scala +++ b/src/compiler/scala/reflect/internal/StdNames.scala @@ -370,7 +370,7 @@ trait StdNames extends NameManglers { self: SymbolTable => val self: NameType = "self" val setAccessible: NameType = "setAccessible" val setAnnotations: NameType = "setAnnotations" - val setTypeSig: NameType = "setTypeSig" + val setTypeSignature: NameType = "setTypeSignature" val synchronized_ : NameType = "synchronized" val tail: NameType = "tail" val thisModuleType: NameType = "thisModuleType" diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index b9ba269ee3..819d94f41a 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -61,13 +61,18 @@ trait Symbols extends api.Symbols { self: SymbolTable => case n: TermName => newTermSymbol(n, pos, newFlags) case n: TypeName => newTypeSymbol(n, pos, newFlags) } - def typeSig: Type = info - def typeSigIn(site: Type): Type = site.memberInfo(this) + def enclosingClass: Symbol = enclClass + def enclosingMethod: Symbol = enclMethod + def thisPrefix: Type = thisType + def selfType: Type = typeOfThis + def typeSignature: Type = info + def typeSignatureIn(site: Type): Type = site memberInfo this + def asType: Type = tpe def asTypeIn(site: Type): Type = site.memberType(this) def asTypeConstructor: Type = typeConstructor def setInternalFlags(flag: Long): this.type = { setFlag(flag); this } - def setTypeSig(tpe: Type): this.type = { setInfo(tpe); this } + def setTypeSignature(tpe: Type): this.type = { setInfo(tpe); this } def setAnnotations(annots: AnnotationInfo*): this.type = { setAnnotations(annots.toList); this } } @@ -103,12 +108,17 @@ trait Symbols extends api.Symbols { self: SymbolTable => def pos = rawpos def setPos(pos: Position): this.type = { this.rawpos = pos; this } + /** !!! The logic after "hasFlag" is far too opaque to be unexplained. + * I'm guessing it's attempting to compensate for flag overloading, + * and embedding such logic in an undocumented island like this is a + * notarized guarantee of future breakage. + */ override def hasModifier(mod: Modifier) = hasFlag(flagOfModifier(mod)) && (!(mod == Modifier.bynameParameter) || isTerm) && (!(mod == Modifier.covariant) || isType) - override def allModifiers: Set[Modifier] = + override def modifiers: Set[Modifier] = Modifier.values filter hasModifier // ------ creators ------------------------------------------------------------------- @@ -1271,14 +1281,11 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** After the typer phase (before, look at the definition's Modifiers), contains * the annotations attached to member a definition (class, method, type, field). 
*/ - def annotations: List[AnnotationInfo] = _annotations + def annotations: List[AnnotationInfo] = { + // Necessary for reflection, see SI-5423 + if (inReflexiveMirror) + initialize - /** This getter is necessary for reflection, see https://issues.scala-lang.org/browse/SI-5423 - * We could auto-inject completion into `annotations' and `setAnnotations', but I'm not sure about that - * @odersky writes: I fear we can't do the forcing for all compiler symbols as that could introduce cycles - */ - def getAnnotations: List[AnnotationInfo] = { - initialize _annotations } @@ -1572,10 +1579,10 @@ trait Symbols extends api.Symbols { self: SymbolTable => else owner.logicallyEnclosingMember /** The top-level class containing this symbol. */ - def toplevelClass: Symbol = + def enclosingTopLevelClass: Symbol = if (owner.isPackageClass) { if (isClass) this else moduleClass - } else owner.toplevelClass + } else owner.enclosingTopLevelClass /** Is this symbol defined in the same scope and compilation unit as `that` symbol? */ def isCoDefinedWith(that: Symbol) = ( @@ -1879,7 +1886,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => */ def sourceFile: AbstractFileType = if (isModule) moduleClass.sourceFile - else toplevelClass.sourceFile + else enclosingTopLevelClass.sourceFile def sourceFile_=(f: AbstractFileType) { abort("sourceFile_= inapplicable for " + this) @@ -2611,7 +2618,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def defString: String = toString override def locationString: String = "" override def enclClass: Symbol = this - override def toplevelClass: Symbol = this + override def enclosingTopLevelClass: Symbol = this override def enclMethod: Symbol = this override def sourceFile: AbstractFileType = null override def ownerChain: List[Symbol] = List() diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index f982c93656..6ce6a7fac0 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -74,7 +74,7 @@ trait Trees extends api.Trees { self: SymbolTable => override def hasModifier(mod: Modifier) = hasFlag(flagOfModifier(mod)) - override def allModifiers: Set[Modifier] = + override def modifiers: Set[Modifier] = Modifier.values filter hasModifier override def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers = Modifiers(flags, privateWithin, f(annotations)) setPositions positions diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 4e842c05da..cd44b700c1 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -261,6 +261,7 @@ trait Types extends api.Types { self: SymbolTable => def declarations = decls def typeArguments = typeArgs def erasedType = transformedType(this) + def substituteTypes(from: List[Symbol], to: List[Type]): Type = subst(from, to) } /** The base class for all types */ diff --git a/src/compiler/scala/reflect/runtime/Mirror.scala b/src/compiler/scala/reflect/runtime/Mirror.scala index 4808326902..028a660a35 100644 --- a/src/compiler/scala/reflect/runtime/Mirror.scala +++ b/src/compiler/scala/reflect/runtime/Mirror.scala @@ -12,28 +12,28 @@ class Mirror extends Universe with RuntimeTypes with TreeBuildUtil with ToolBoxe import definitions._ - def classWithName(name: String): Symbol = { + def symbolForName(name: String): Symbol = { val clazz = javaClass(name, defaultReflectiveClassLoader()) 
classToScala(clazz) } - def getCompanionObject(clazz: Symbol): AnyRef = { + def companionInstance(clazz: Symbol): AnyRef = { val singleton = ReflectionUtils.singletonInstance(clazz.fullName, defaultReflectiveClassLoader()) singleton } - def getClass(obj: AnyRef): Symbol = classToScala(obj.getClass) - def getType(obj: AnyRef): Type = typeToScala(obj.getClass) + def symbolOfInstance(obj: Any): Symbol = classToScala(obj.getClass) + def typeOfInstance(obj: Any): Type = typeToScala(obj.getClass) // to do add getClass/getType for instances of primitive types, probably like this: // def getClass[T <: AnyVal : Manifest](x: T): Symbol = manifest[T].getClass - def getValue(receiver: AnyRef, field: Symbol): Any = { + def getValueOfField(receiver: AnyRef, field: Symbol): Any = { fieldToJava(field).get(receiver) } - def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit = { + def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit = { fieldToJava(field).set(receiver, value) } - def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any = { + def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any = { if (meth.owner == ArrayClass) { meth.name match { case nme.length => return Array.getLength(receiver) diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala index fc4177e956..275c85f332 100644 --- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala +++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala @@ -2,7 +2,6 @@ package scala.reflect package runtime trait TreeBuildUtil extends Universe with api.TreeBuildUtil { - def staticClass(fullname: String): Symbol = definitions.getRequiredClass(fullname) def staticModule(fullname: String): Symbol = definitions.getRequiredModule(fullname) def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.thisType @@ -39,7 +38,7 @@ trait TreeBuildUtil extends Universe with api.TreeBuildUtil { selectIn(owner.info, idx) } - def freeVar(name: String, info: Type, value: Any) = newFreeVar(newTermName(name), info, value) + def newFreeVar(name: String, info: Type, value: Any) = newFreeVar(newTermName(name), info, value) def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers = Modifiers(flags, privateWithin, annotations) diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index d4152dffdc..18735cafe2 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -1076,7 +1076,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb def compiles(sym: Symbol): Boolean = if (sym == NoSymbol) false else if (symSource.isDefinedAt(sym)) true - else if (!sym.owner.isPackageClass) compiles(sym.toplevelClass) + else if (!sym.owner.isPackageClass) compiles(sym.enclosingTopLevelClass) else if (sym.isModuleClass) compiles(sym.sourceModule) else false diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala index ef87925959..ac6c8c4c77 100644 --- a/src/compiler/scala/tools/nsc/ast/Reifiers.scala +++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala @@ -160,8 +160,6 @@ trait Reifiers { self: Global => mirrorSelect("definitions.RootClass") else if (sym == EmptyPackage) mirrorSelect("definitions.EmptyPackage") - else if (sym == EmptyPackageClass) - mirrorSelect("definitions.EmptyPackageClass") else if (sym.isModuleClass) 
Select(reifySymRef(sym.sourceModule), "moduleClass") else if (sym.isStatic && sym.isClass) @@ -190,7 +188,7 @@ trait Reifiers { self: Global => val symtpe = lambdaLift.boxIfCaptured(sym, sym.tpe, erasedTypes = false) def markIfCaptured(arg: Ident): Tree = if (sym.isCapturedVariable) referenceCapturedVariable(arg) else arg - mirrorCall("freeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym))) + mirrorCall("newFreeVar", reify(sym.name.toString), reify(symtpe), markIfCaptured(Ident(sym))) } else { if (reifyDebug) println("Late local: " + sym) registerReifiableSymbol(sym) @@ -218,7 +216,7 @@ trait Reifiers { self: Global => * Generate code to add type and annotation info to a reified symbol */ private def fillInSymbol(sym: Symbol): Tree = { - val rset = Apply(Select(reifySymRef(sym), nme.setTypeSig), List(reifyType(sym.info))) + val rset = Apply(Select(reifySymRef(sym), nme.setTypeSignature), List(reifyType(sym.info))) if (sym.annotations.isEmpty) rset else Apply(Select(rset, nme.setAnnotations), List(reify(sym.annotations))) } @@ -335,7 +333,7 @@ trait Reifiers { self: Global => val sym = tree.symbol if (reifyDebug) println("This for %s, reified as freeVar".format(sym)) if (reifyDebug) println("Free: " + sym) - val freeVar = mirrorCall("freeVar", reify(sym.name.toString), reify(sym.tpe), This(sym)) + val freeVar = mirrorCall("newFreeVar", reify(sym.name.toString), reify(sym.tpe), This(sym)) mirrorCall(nme.Ident, freeVar) case This(_) => if (reifyDebug) println("This for %s, reified as This".format(tree.symbol)) diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala index 9c0670e981..a8083d7a2d 100644 --- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala @@ -1269,13 +1269,13 @@ abstract class ClassfileParser { if ((jflags & (JAVA_ACC_PRIVATE | JAVA_ACC_PROTECTED | JAVA_ACC_PUBLIC)) == 0) // See ticket #1687 for an example of when topLevelClass is NoSymbol: it // apparently occurs when processing v45.3 bytecode. - if (sym.toplevelClass != NoSymbol) - sym.privateWithin = sym.toplevelClass.owner + if (sym.enclosingTopLevelClass != NoSymbol) + sym.privateWithin = sym.enclosingTopLevelClass.owner // protected in java means package protected. 
#3946 if ((jflags & JAVA_ACC_PROTECTED) != 0) - if (sym.toplevelClass != NoSymbol) - sym.privateWithin = sym.toplevelClass.owner + if (sym.enclosingTopLevelClass != NoSymbol) + sym.privateWithin = sym.enclosingTopLevelClass.owner } @inline private def isPrivate(flags: Int) = (flags & JAVA_ACC_PRIVATE) != 0 diff --git a/src/compiler/scala/tools/nsc/transform/Flatten.scala b/src/compiler/scala/tools/nsc/transform/Flatten.scala index 4fa5b52de3..89f1cc26e0 100644 --- a/src/compiler/scala/tools/nsc/transform/Flatten.scala +++ b/src/compiler/scala/tools/nsc/transform/Flatten.scala @@ -60,8 +60,8 @@ abstract class Flatten extends InfoTransform { private val flattened = new TypeMap { def apply(tp: Type): Type = tp match { case TypeRef(pre, sym, args) if isFlattenablePrefix(pre) => - assert(args.isEmpty && sym.toplevelClass != NoSymbol, sym.ownerChain) - typeRef(sym.toplevelClass.owner.thisType, sym, Nil) + assert(args.isEmpty && sym.enclosingTopLevelClass != NoSymbol, sym.ownerChain) + typeRef(sym.enclosingTopLevelClass.owner.thisType, sym, Nil) case ClassInfoType(parents, decls, clazz) => var parents1 = parents val decls1 = scopeTransform(clazz) { @@ -119,7 +119,7 @@ abstract class Flatten extends InfoTransform { val sym = tree.symbol val tree1 = tree match { case ClassDef(_, _, _, _) if sym.isNestedClass => - liftedDefs(sym.toplevelClass.owner) += tree + liftedDefs(sym.enclosingTopLevelClass.owner) += tree EmptyTree case Select(qual, name) if (sym.isStaticModule && !sym.owner.isPackageClass) => atPhase(phase.next) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index e8c03aff66..b97fbebec2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -228,9 +228,9 @@ trait Infer { if (sym.isError) { tree setSymbol sym setType ErrorType } else { - val topClass = context.owner.toplevelClass + val topClass = context.owner.enclosingTopLevelClass if (context.unit.exists) - context.unit.depends += sym.toplevelClass + context.unit.depends += sym.enclosingTopLevelClass var sym1 = sym filter (alt => context.isAccessible(alt, pre, site.isInstanceOf[Super])) // Console.println("check acc " + (sym, sym1) + ":" + (sym.tpe, sym1.tpe) + " from " + pre);//DEBUG diff --git a/src/compiler/scala/tools/nsc/typechecker/Macros.scala b/src/compiler/scala/tools/nsc/typechecker/Macros.scala index 4c790bfc34..b2ee36ee11 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Macros.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Macros.scala @@ -106,11 +106,11 @@ trait Macros { self: Analyzer => val mmeth = macroMeth(mac) if (mmeth == NoSymbol) None else { - val receiverClass: mirror.Symbol = mirror.classWithName(mmeth.owner.fullName) + val receiverClass: mirror.Symbol = mirror.symbolForName(mmeth.owner.fullName) val receiverObj = receiverClass.companionModule if (receiverObj == mirror.NoSymbol) None else { - val receiver = mirror.getCompanionObject(receiverClass) + val receiver = mirror.companionInstance(receiverClass) val rmeth = receiverObj.info.member(mirror.newTermName(mmeth.name.toString)) if (rmeth == mirror.NoSymbol) None else { @@ -140,7 +140,7 @@ trait Macros { self: Analyzer => } val rawArgs: Seq[Any] = rawArgss.flatten try { - Some(mirror.invoke(receiver, rmeth, rawArgs: _*)) + Some(mirror.invoke(receiver, rmeth)(rawArgs: _*)) } catch { case ex => val realex = ReflectionUtils.unwrapThrowable(ex) diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala 
b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index eb0bed035c..bc8a8a31b5 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -3714,7 +3714,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { return typed(treeCopy.Select(tree, qual1, name), mode, pt) } if (!reallyExists(sym)) { - if (context.owner.toplevelClass.isJavaDefined && name.isTypeName) { + if (context.owner.enclosingTopLevelClass.isJavaDefined && name.isTypeName) { val tree1 = atPos(tree.pos) { gen.convertToSelectFromType(qual, name) } if (tree1 != EmptyTree) return typed1(tree1, mode, pt) } diff --git a/src/library/scala/reflect/Manifest.scala b/src/library/scala/reflect/Manifest.scala index 8bd45c0e33..6c02878b19 100644 --- a/src/library/scala/reflect/Manifest.scala +++ b/src/library/scala/reflect/Manifest.scala @@ -222,7 +222,7 @@ object Manifest { val clazz = classToSymbol(erasure) val pre = prefix match { case Some(pm) => pm.tpe - case None => clazz.owner.thisType + case None => clazz.owner.thisPrefix } namedType(pre, clazz, typeArguments map (_.tpe)) } diff --git a/src/library/scala/reflect/api/Mirror.scala b/src/library/scala/reflect/api/Mirror.scala index 136f52b05f..448dca752c 100644 --- a/src/library/scala/reflect/api/Mirror.scala +++ b/src/library/scala/reflect/api/Mirror.scala @@ -3,57 +3,59 @@ package api /** A mirror establishes connections of * runtime entities such as class names and object instances - * with a refexive universe. + * with a reflexive universe. */ trait Mirror extends Universe with RuntimeTypes with TreeBuildUtil { /** The Scala class symbol that has given fully qualified name * @param name The fully qualified name of the class to be returned - * @throws java.lang.ClassNotFoundException if no class wiht that name exists + * @throws java.lang.ClassNotFoundException if no class with that name exists * to do: throws anything else? */ - def classWithName(name: String): Symbol + def symbolForName(name: String): Symbol - /** Return a reference to the companion object of this class symbol + /** Return a reference to the companion object of the given class symbol. */ - def getCompanionObject(clazz: Symbol): AnyRef + def companionInstance(clazz: Symbol): AnyRef - /** The Scala class symbol corresponding to the runtime class of given object - * @param The object from which the class is returned + /** The Scala class symbol corresponding to the runtime class of the given instance. + * @param instance The instance + * @return The class Symbol for the instance * @throws ? */ - def getClass(obj: AnyRef): Symbol + def symbolOfInstance(instance: Any): Symbol - /** The Scala type corresponding to the runtime type of given object. + /** The Scala type corresponding to the runtime type of given instance. * If the underlying class is parameterized, this will be an existential type, * with unknown type arguments. * - * @param The object from which the type is returned + * @param instance The instance. + * @return The Type of the given instance. * @throws ? */ - def getType(obj: AnyRef): Type + def typeOfInstance(instance: Any): Type /** The value of a field on a receiver instance. * @param receiver The receiver instance * @param field The field * @return The value contained in `receiver.field`. */ - def getValue(receiver: AnyRef, field: Symbol): Any + def getValueOfField(receiver: AnyRef, field: Symbol): Any /** Sets the value of a field on a receiver instance. 
* @param receiver The receiver instance * @param field The field * @param value The new value to be stored in the field. */ - def setValue(receiver: AnyRef, field: Symbol, value: Any): Unit + def setValueOfField(receiver: AnyRef, field: Symbol, value: Any): Unit - /** Invokes a method on a reciver instance with some arguments + /** Invokes a method on a receiver instance with some arguments * @param receiver The receiver instance * @param meth The method * @param args The method call's arguments * @return The result of invoking `receiver.meth(args)` */ - def invoke(receiver: AnyRef, meth: Symbol, args: Any*): Any + def invoke(receiver: AnyRef, meth: Symbol)(args: Any*): Any /** Maps a Java class to a Scala type reference * @param clazz The Java class object diff --git a/src/library/scala/reflect/api/Names.scala b/src/library/scala/reflect/api/Names.scala index 9498f0af36..3a00f21c8c 100755 --- a/src/library/scala/reflect/api/Names.scala +++ b/src/library/scala/reflect/api/Names.scala @@ -11,7 +11,6 @@ package api * `name1 == name2` implies `name1 eq name2`. */ trait Names { - /** The abstract type of names */ type Name >: Null <: AbsName @@ -37,12 +36,20 @@ trait Names { /** Replaces all occurrences of $op_names in this name by corresponding operator symbols. * Example: `foo_+=` becomes `foo_$plus$eq`. */ - def decode: String + def decoded: String /** Replaces all occurrences of operator symbols in this name by corresponding $op_names. * Example: `foo_$plus$eq` becomes `foo_+=` */ - def encode: Name + def encoded: String + + /** The decoded name, still represented as a name. + */ + def decodedName: Name + + /** The encoded name, still represented as a name. + */ + def encodedName: Name } /** Create a new term name. diff --git a/src/library/scala/reflect/api/StandardDefinitions.scala b/src/library/scala/reflect/api/StandardDefinitions.scala index 3526cf259d..e737b0ea4f 100755 --- a/src/library/scala/reflect/api/StandardDefinitions.scala +++ b/src/library/scala/reflect/api/StandardDefinitions.scala @@ -11,14 +11,11 @@ trait StandardDefinitions { self: Universe => val definitions: AbsDefinitions abstract class AbsDefinitions { - // outer packages and their classes - def RootPackage: Symbol // under consideration + // packages + def RootPackage: Symbol def RootClass: Symbol def EmptyPackage: Symbol - def EmptyPackageClass: Symbol - def ScalaPackage: Symbol - def ScalaPackageClass: Symbol // top types def AnyClass : Symbol @@ -54,17 +51,19 @@ trait StandardDefinitions { self: Universe => // fundamental modules def PredefModule: Symbol - // fundamental type constructions - def ClassType(arg: Type): Type + /** Given a type T, returns the type corresponding to the VM's + * representation: ClassClass's type constructor applied to `arg`. + */ + def vmClassType(arg: Type): Type // !!! better name? /** The string representation used by the given type in the VM. */ - def signature(tp: Type): String + def vmSignature(sym: Symbol, info: Type): String /** Is symbol one of the value classes? */ - def isValueClass(sym: Symbol): Boolean + def isValueClass(sym: Symbol): Boolean // !!! better name? /** Is symbol one of the numeric value classes? */ - def isNumericValueClass(sym: Symbol): Boolean + def isNumericValueClass(sym: Symbol): Boolean // !!! better name? 
} } diff --git a/src/library/scala/reflect/api/Symbols.scala b/src/library/scala/reflect/api/Symbols.scala index 65a3680fdd..15d754b5b4 100755 --- a/src/library/scala/reflect/api/Symbols.scala +++ b/src/library/scala/reflect/api/Symbols.scala @@ -9,12 +9,21 @@ trait Symbols { self: Universe => /** The modifiers of this symbol */ - def allModifiers: Set[Modifier] + def modifiers: Set[Modifier] /** Does this symbol have given modifier? */ def hasModifier(mod: Modifier): Boolean + /** A list of annotations attached to this Symbol. + */ + def annotations: List[self.AnnotationInfo] + + /** Whether this symbol carries an annotation for which the given + * symbol is its typeSymbol. + */ + def hasAnnotation(sym: Symbol): Boolean + /** The owner of this symbol. This is the symbol * that directly contains the current symbol's definition. * The `NoSymbol` symbol does not have an owner, and calling this method @@ -30,14 +39,6 @@ trait Symbols { self: Universe => */ def name: Name - /** The name of the symbol before decoding, e.g. `\$eq\$eq` instead of `==`. - */ - def encodedName: String - - /** The decoded name of the symbol, e.g. `==` instead of `\$eq\$eq`. - */ - def decodedName: String - /** The encoded full path name of this symbol, where outer names and inner names * are separated by periods. */ @@ -66,49 +67,43 @@ trait Symbols { self: Universe => * * The java access levels translate as follows: * - * java private: hasFlag(PRIVATE) && !hasAccessBoundary - * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosing package) - * java protected: hasFlag(PROTECTED) && (privateWithin == enclosing package) - * java public: !hasFlag(PRIVATE | PROTECTED) && !hasAccessBoundary + * java private: hasFlag(PRIVATE) && (privateWithin == NoSymbol) + * java package: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == enclosingPackage) + * java protected: hasFlag(PROTECTED) && (privateWithin == enclosingPackage) + * java public: !hasFlag(PRIVATE | PROTECTED) && (privateWithin == NoSymbol) */ def privateWithin: Symbol - /** Whether this symbol has a "privateWithin" visibility barrier attached. - */ - def hasAccessBoundary: Boolean - - /** A list of annotations attached to this Symbol. - */ - def getAnnotations: List[self.AnnotationInfo] - /** For a class: the module or case class factory with the same name in the same package. + * For a module: the class with the same name in the same package. * For all others: NoSymbol */ - def companionModule: Symbol - - /** For a module: the class with the same name in the same package. - * For all others: NoSymbol - */ - def companionClass: Symbol - - /** The module corresponding to this module class (note that this - * is not updated when a module is cloned), or NoSymbol if this is not a ModuleClass - */ - def sourceModule: Symbol + def companionSymbol: Symbol /** If symbol is an object definition, its implied associated class, * otherwise NoSymbol */ def moduleClass: Symbol // needed for LiftCode - /** The top-level class containing this symbol. */ - def toplevelClass: Symbol + /** If this symbol is a top-level class, this symbol; otherwise the next enclosing + * top-level class, or `NoSymbol` if none exists. + */ + def enclosingTopLevelClass: Symbol - /** The next enclosing class, or `NoSymbol` if none exists */ - def enclClass : Symbol + /** If this symbol is a class, this symbol; otherwise the next enclosing + * class, or `NoSymbol` if none exists. 
+ */ + def enclosingClass: Symbol - /** The next enclosing method, or `NoSymbol` if none exists */ - def enclMethod : Symbol + /** If this symbol is a method, this symbol; otherwise the next enclosing + * method, or `NoSymbol` if none exists. + */ + def enclosingMethod: Symbol + + /** If this symbol is a package class, this symbol; otherwise the next enclosing + * package class, or `NoSymbol` if none exists. + */ + def enclosingPackageClass: Symbol /** Does this symbol represent the definition of term? * Note that every symbol is either a term or a type. @@ -141,13 +136,13 @@ trait Symbols { self: Universe => /** The type signature of this symbol. * Note if the symbol is a member of a class, one almost always is interested - * in `typeSigIn` with a site type instead. + * in `typeSignatureIn` with a site type instead. */ - def typeSig: Type + def typeSignature: Type // !!! Since one should almost never use this, let's give it a different name. /** The type signature of this symbol seen as a member of given type `site`. */ - def typeSigIn(site: Type): Type + def typeSignatureIn(site: Type): Type /** A type reference that refers to this type symbol * Note if symbol is a member of a class, one almost always is interested @@ -156,11 +151,11 @@ trait Symbols { self: Universe => * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol * `C`. Then `C.asType` is the type `C[T]`. * - * By contrast, `C.typeSig` would be a type signature of form + * By contrast, `C.typeSignature` would be a type signature of form * `PolyType(ClassInfoType(...))` that describes type parameters, value * parameters, parent types, and members of `C`. */ - def asType: Type + def asType: Type // !!! Same as typeSignature. /** A type reference that refers to this type symbol seen * as a member of given type `site`. @@ -172,37 +167,37 @@ trait Symbols { self: Universe => * are part of results of `asType`, but not of `asTypeConstructor`. * * Example: Given a class declaration `class C[T] { ... } `, that generates a symbol - * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeCponstructor` is `C`. + * `C`. Then `C.asType` is the type `C[T]`, but `C.asTypeConstructor` is `C`. */ def asTypeConstructor: Type // needed by LiftCode + + /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`. + */ + def thisPrefix: Type /** If this symbol is a class or trait, its self type, otherwise the type * of the symbol itself. */ - def typeOfThis: Type - - /** If this symbol is a class, the type `C.this`, otherwise `NoPrefix`. - */ - def thisType: Type + def selfType: Type /** A fresh symbol with given name `name`, position `pos` and flags `flags` that has * the current symbol as its owner. */ def newNestedSymbol(name: Name, pos: Position, flags: Long): Symbol // needed by LiftCode - + /** Low-level operation to set the symbol's flags * @return the symbol itself */ - def setInternalFlags(flags: Long): this.type // needed by LiftCode + def setInternalFlags(flags: Long): this.type // needed by LiftCode !!! not enough reason to have in the api /** Set symbol's type signature to given type * @return the symbol itself */ - def setTypeSig(tpe: Type): this.type // needed by LiftCode + def setTypeSignature(tpe: Type): this.type // needed by LiftCode !!! not enough reason to have in the api /** Set symbol's annotations to given annotations `annots`. 
*/ - def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode + def setAnnotations(annots: AnnotationInfo*): this.type // needed by LiftCode !!! not enough reason to have in the api } val NoSymbol: Symbol diff --git a/src/library/scala/reflect/api/TreeBuildUtil.scala b/src/library/scala/reflect/api/TreeBuildUtil.scala index b437824925..f28008bc21 100644 --- a/src/library/scala/reflect/api/TreeBuildUtil.scala +++ b/src/library/scala/reflect/api/TreeBuildUtil.scala @@ -3,19 +3,19 @@ package scala.reflect.api trait TreeBuildUtil extends Universe { /** The symbol corresponding to the globally accessible class with the - * given fully qualified name `fullname`. + * given fully qualified name `fullName`. */ - def staticClass(fullname: String): Symbol + def staticClass(fullName: String): Symbol /** The symbol corresponding to the globally accessible object with the - * given fully qualified name `fullname`. + * given fully qualified name `fullName`. */ - def staticModule(fullname: String): Symbol + def staticModule(fullName: String): Symbol /** The this-ptype of the globally accessible object with the - * given fully qualified name `fullname`. + * given fully qualified name `fullName`. */ - def thisModuleType(fullname: String): Type + def thisModuleType(fullName: String): Type /** Selects type symbol with given simple name `name` from the defined members of `owner`. */ @@ -38,7 +38,7 @@ trait TreeBuildUtil extends Universe { * @param tsig the type signature of the free variable * @param value the value of the free variable at runtime */ - def freeVar(name: String, tsig: Type, value: Any): Symbol + def newFreeVar(name: String, info: Type, value: Any): Symbol /** Create a Modiiers structure given internal flags, qualifier, annotations */ def modifiersFromInternalFlags(flags: Long, privateWithin: Name, annotations: List[Tree]): Modifiers diff --git a/src/library/scala/reflect/api/TreePrinters.scala b/src/library/scala/reflect/api/TreePrinters.scala index 70a100015b..19bfd09b81 100644 --- a/src/library/scala/reflect/api/TreePrinters.scala +++ b/src/library/scala/reflect/api/TreePrinters.scala @@ -31,7 +31,6 @@ trait TreePrinters { self: Universe => // emits more or less verbatim representation of the provided tree // todo. 
when LiftCode becomes a macro, throw this code away and use that macro class RawTreePrinter(out: PrintWriter) extends TreePrinter { - def print(args: Any*): Unit = args foreach { case EmptyTree => print("EmptyTree") @@ -66,14 +65,14 @@ trait TreePrinters { self: Universe => print(")") case mods: Modifiers => val parts = collection.mutable.ListBuffer[String]() - parts += "Set(" + mods.allModifiers.map(_.sourceString).mkString(", ") + ")" + parts += "Set(" + mods.modifiers.map(_.sourceString).mkString(", ") + ")" parts += "newTypeName(\"" + mods.privateWithin.toString + "\")" parts += "List(" + mods.annotations.map{showRaw}.mkString(", ") + ")" var keep = 3 if (keep == 3 && mods.annotations.isEmpty) keep -= 1 if (keep == 2 && mods.privateWithin == EmptyTypeName) keep -= 1 - if (keep == 1 && mods.allModifiers.isEmpty) keep -= 1 + if (keep == 1 && mods.modifiers.isEmpty) keep -= 1 print("Modifiers(", parts.take(keep).mkString(", "), ")") case name: Name => diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index 3b48a02949..32940cbcd6 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -16,8 +16,8 @@ trait Trees { self: Universe => type Modifiers <: AbsModifiers abstract class AbsModifiers { + def modifiers: Set[Modifier] def hasModifier(mod: Modifier): Boolean - def allModifiers: Set[Modifier] def privateWithin: Name // default: EmptyTypeName def annotations: List[Tree] // default: List() def mapAnnotations(f: List[Tree] => List[Tree]): Modifiers @@ -483,7 +483,7 @@ trait Trees { self: Universe => assert(!argss.isEmpty) // todo. we need to expose names in scala.reflect.api // val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR) - val superRef: Tree = Select(New(tpt), "") + val superRef: Tree = Select(New(tpt), nme.CONSTRUCTOR) (superRef /: argss) (Apply) } @@ -1140,9 +1140,9 @@ trait Trees { self: Universe => abstract class Transformer { val treeCopy: TreeCopier = newLazyTreeCopier protected var currentOwner: Symbol = definitions.RootClass - protected def currentMethod = currentOwner.enclMethod - protected def currentClass = currentOwner.enclClass - protected def currentPackage = currentOwner.toplevelClass.owner + protected def currentMethod = currentOwner.enclosingMethod + protected def currentClass = currentOwner.enclosingClass + protected def currentPackage = currentOwner.enclosingTopLevelClass.owner def transform(tree: Tree): Tree = tree match { case EmptyTree => tree diff --git a/src/library/scala/reflect/api/Types.scala b/src/library/scala/reflect/api/Types.scala index 6185a788ae..8a91956320 100755 --- a/src/library/scala/reflect/api/Types.scala +++ b/src/library/scala/reflect/api/Types.scala @@ -6,7 +6,6 @@ trait Types { self: Universe => /** This class declares operations that are visible in a Type. */ abstract class AbsType { - /** The type symbol associated with the type, or `NoSymbol` for types * that do not refer to a type symbol. */ @@ -47,7 +46,7 @@ trait Types { self: Universe => /** Substitute types in `to` for corresponding occurrences of references to * symbols `from` in this type. */ - def subst(from: List[Symbol], to: List[Type]): Type + def substituteTypes(from: List[Symbol], to: List[Type]): Type // !!! Too many things with names like "subst" /** If this is a parameterized types, the type arguments. * Otherwise the empty list @@ -56,7 +55,7 @@ trait Types { self: Universe => /** Is this type a type constructor that is missing its type arguments? 
*/ - def isHigherKinded: Boolean + def isHigherKinded: Boolean // !!! This should be called "isTypeConstructor", no? /** * Expands type aliases and converts higher-kinded TypeRefs to PolyTypes. @@ -66,7 +65,7 @@ trait Types { self: Universe => * TypeRef(pre, , List()) is replaced by * PolyType(X, TypeRef(pre, , List(X))) */ - def normalize: Type + def normalize: Type // !!! Alternative name? "normalize" is used to mean too many things. /** Does this type conform to given type argument `that`? */ def <:< (that: Type): Boolean @@ -74,11 +73,11 @@ trait Types { self: Universe => /** Is this type equivalent to given type argument `that`? */ def =:= (that: Type): Boolean - /** The list of all baseclasses of this type (including its own typeSymbol) + /** The list of all base classes of this type (including its own typeSymbol) * in reverse linearization order, starting with the class itself and ending * in class Any. */ - def baseClasses: List[Symbol] + def baseClasses: List[Symbol] // !!! Alternative name, perhaps linearization? /** The least type instance of given class which is a supertype * of this type. Example: @@ -104,9 +103,9 @@ trait Types { self: Universe => def asSeenFrom(pre: Type, clazz: Symbol): Type /** The erased type corresponding to this type after - * all transcformations from Scala to Java have been performed. + * all transformations from Scala to Java have been performed. */ - def erasedType: Type + def erasedType: Type // !!! "erasedType", compare with "widen" (so "erase") or "underlying" (so "erased") /** Apply `f` to each part of this type, returning * a new type. children get mapped before their parents */ @@ -138,7 +137,7 @@ trait Types { self: Universe => /** If this is a singleton type, widen it to its nearest underlying non-singleton * base type by applying one or more `underlying` dereferences. - * If this is not a singlecon type, returns this type itself. + * If this is not a singleton type, returns this type itself. 
* * Example: * @@ -400,11 +399,6 @@ trait Types { self: Universe => def unapply(tpe: ClassInfoType): Option[(List[Type], Scope, Symbol)] } - - - - - abstract class NullaryMethodTypeExtractor { def apply(resultType: Type): NullaryMethodType def unapply(tpe: NullaryMethodType): Option[(Type)] diff --git a/src/library/scala/reflect/macro/Context.scala b/src/library/scala/reflect/macro/Context.scala index 3b6f96d7a8..ebbd4735e5 100644 --- a/src/library/scala/reflect/macro/Context.scala +++ b/src/library/scala/reflect/macro/Context.scala @@ -19,7 +19,7 @@ trait Context extends api.Universe { * * The reifier transforms it to the following tree: * - * $mr.Apply($mr.Select($mr.Ident($mr.freeVar("x", , x), "+"), List($mr.Literal($mr.Constant(1)))))) + * $mr.Apply($mr.Select($mr.Ident($mr.newFreeVar("x", , x), "+"), List($mr.Literal($mr.Constant(1)))))) * * The transformation looks mostly straightforward, but it has its tricky parts: * * Reifier retains symbols and types defined outside the reified tree, however diff --git a/src/scalap/scala/tools/scalap/Classfiles.scala b/src/scalap/scala/tools/scalap/Classfiles.scala index 72b3824157..2cbeaa945f 100644 --- a/src/scalap/scala/tools/scalap/Classfiles.scala +++ b/src/scalap/scala/tools/scalap/Classfiles.scala @@ -41,31 +41,5 @@ object Classfiles { CONSTANT_INTFMETHODREF -> "InterfaceMethod", CONSTANT_NAMEANDTYPE -> "NameAndType" ) - - final val BAD_ATTR = 0x00000 - final val SOURCEFILE_ATTR = 0x00001 - final val SYNTHETIC_ATTR = 0x00002 - final val DEPRECATED_ATTR = 0x00004 - final val CODE_ATTR = 0x00008 - final val EXCEPTIONS_ATTR = 0x00010 - final val CONSTANT_VALUE_ATTR = 0x00020 - final val LINE_NUM_TABLE_ATTR = 0x00040 - final val LOCAL_VAR_TABLE_ATTR = 0x00080 - final val INNERCLASSES_ATTR = 0x08000 - final val META_ATTR = 0x10000 - final val SCALA_ATTR = 0x20000 - - final val SOURCEFILE_N = "SourceFile" - final val SYNTHETIC_N = "Synthetic" - final val DEPRECATED_N = "Deprecated" - final val CODE_N = "Code" - final val EXCEPTIONS_N = "Exceptions" - final val CONSTANT_VALUE_N = "ConstantValue" - final val LINE_NUM_TABLE_N = "LineNumberTable" - final val LOCAL_VAR_TABLE_N = "LocalVariableTable" - final val INNERCLASSES_N = "InnerClasses" - final val META_N = "JacoMeta" - final val SCALA_N = "ScalaSignature" - final val CONSTR_N = "" } diff --git a/test/files/run/reflection-implClass.scala b/test/files/run/reflection-implClass.scala index 2b30e29bb3..7718b52f33 100644 --- a/test/files/run/reflection-implClass.scala +++ b/test/files/run/reflection-implClass.scala @@ -8,19 +8,19 @@ object Test extends App with Outer { import scala.reflect.mirror - assert(mirror.classToSymbol(manifest[Foo].erasure).typeSig.declaration(mirror.newTermName("bar")).typeSig == - mirror.classToSymbol(manifest[Bar].erasure).typeSig.declaration(mirror.newTermName("foo")).typeSig) + assert(mirror.classToSymbol(manifest[Foo].erasure).typeSignature.declaration(mirror.newTermName("bar")).typeSignature == + mirror.classToSymbol(manifest[Bar].erasure).typeSignature.declaration(mirror.newTermName("foo")).typeSignature) val s1 = implClass(manifest[Foo].erasure) assert(s1 != mirror.NoSymbol) - assert(s1.typeSig != mirror.NoType) - assert(s1.companionModule.typeSig != mirror.NoType) - assert(s1.companionModule.typeSig.declaration(mirror.newTermName("bar")) != mirror.NoSymbol) + assert(s1.typeSignature != mirror.NoType) + assert(s1.companionSymbol.typeSignature != mirror.NoType) + assert(s1.companionSymbol.typeSignature.declaration(mirror.newTermName("bar")) != mirror.NoSymbol) val 
s2 = implClass(manifest[Bar].erasure) assert(s2 != mirror.NoSymbol) - assert(s2.typeSig != mirror.NoType) - assert(s2.companionModule.typeSig != mirror.NoType) - assert(s2.companionModule.typeSig.declaration(mirror.newTermName("foo")) != mirror.NoSymbol) + assert(s2.typeSignature != mirror.NoType) + assert(s2.companionSymbol.typeSignature != mirror.NoType) + assert(s2.companionSymbol.typeSignature.declaration(mirror.newTermName("foo")) != mirror.NoSymbol) def implClass(clazz: Class[_]) = { val implClass = Class.forName(clazz.getName + "$class") mirror.classToSymbol(implClass) diff --git a/test/files/run/t5423.scala b/test/files/run/t5423.scala index 2139773ff1..fc507c417b 100644 --- a/test/files/run/t5423.scala +++ b/test/files/run/t5423.scala @@ -6,7 +6,7 @@ import scala.reflect.Code final class table extends StaticAnnotation @table class A -object Test extends App{ +object Test extends App { val s = classToSymbol(classOf[A]) - println(s.getAnnotations) + println(s.annotations) } -- cgit v1.2.3 From 5be13bf0e0a2df0ddeda7652db0f54c0cba6c3bf Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 5 Feb 2012 22:38:20 -0800 Subject: Revert "Scaladoc @usecase annotation overriding / SI-5287" This reverts commit 7946ac410ad74894cd0eb6dfd29447f173911b99. --- src/compiler/scala/tools/nsc/util/DocStrings.scala | 26 +-- .../resources/implicit-inheritance-override.scala | 41 ----- .../resources/implicit-inheritance-usecase.scala | 57 ------- test/scaladoc/scala/html/HtmlFactoryTest.scala | 189 ++++----------------- 4 files changed, 36 insertions(+), 277 deletions(-) delete mode 100644 test/scaladoc/resources/implicit-inheritance-override.scala delete mode 100644 test/scaladoc/resources/implicit-inheritance-usecase.scala diff --git a/src/compiler/scala/tools/nsc/util/DocStrings.scala b/src/compiler/scala/tools/nsc/util/DocStrings.scala index 2c8b77be71..1db6c38b4d 100755 --- a/src/compiler/scala/tools/nsc/util/DocStrings.scala +++ b/src/compiler/scala/tools/nsc/util/DocStrings.scala @@ -71,35 +71,13 @@ object DocStrings { * Every section starts with a `@` and extends to the next `@`, or * to the end of the comment string, but excluding the final two * characters which terminate the comment. - * - * Also take usecases into account - they need to expand until the next - * @usecase or the end of the string, as they might include other sections - * of their own */ def tagIndex(str: String, p: Int => Boolean = (idx => true)): List[(Int, Int)] = findAll(str, 0) (idx => str(idx) == '@' && p(idx)) match { case List() => List() - case idxs => { - val idxs2 = mergeUsecaseSections(str, idxs) - idxs2 zip (idxs2.tail ::: List(str.length - 2)) - } - } - - /** - * Merge sections following an @usecase into the usecase comment, so they - * can override the parent symbol's sections - */ - def mergeUsecaseSections(str: String, idxs: List[Int]): List[Int] = { - idxs.find(str.substring(_).startsWith("@usecase")) match { - case Some(firstUC) => - val commentSections = idxs.take(idxs.indexOf(firstUC)) - val usecaseSections = idxs.drop(idxs.indexOf(firstUC)).filter(str.substring(_).startsWith("@usecase")) - commentSections ::: usecaseSections - case None => - idxs + case idxs => idxs zip (idxs.tail ::: List(str.length - 2)) } - } - + /** Does interval `iv` start with given `tag`? 
*/ def startsWithTag(str: String, section: (Int, Int), tag: String): Boolean = diff --git a/test/scaladoc/resources/implicit-inheritance-override.scala b/test/scaladoc/resources/implicit-inheritance-override.scala deleted file mode 100644 index 85b8e8d543..0000000000 --- a/test/scaladoc/resources/implicit-inheritance-override.scala +++ /dev/null @@ -1,41 +0,0 @@ -// This tests the implicit comment inheritance capabilities of scaladoc for class inheritance (no $super, no @inheritdoc) -class Base { - /** - * The base comment. And another sentence... - * - * @param arg1 The T term comment - * @param arg2 The string comment - * @tparam T the type of the first argument - * @return The return comment - */ - def function[T](arg1: T, arg2: String): Double = 0.0d -} - -class DerivedA extends Base { - /** - * Overriding the comment, the params and returns comments should stay the same. - */ - override def function[T](arg1: T, arg2: String): Double = 1.0d -} - -class DerivedB extends Base { - /** - * @param arg1 The overridden T term comment - * @param arg2 The overridden string comment - */ - override def function[T](arg1: T, arg2: String): Double = 2.0d -} - -class DerivedC extends Base { - /** - * @return The overridden return comment - */ - override def function[T](arg1: T, arg2: String): Double = 3.0d -} - -class DerivedD extends Base { - /** - * @tparam T The overriden type parameter comment - */ - override def function[T](arg1: T, arg2: String): Double = 3.0d -} \ No newline at end of file diff --git a/test/scaladoc/resources/implicit-inheritance-usecase.scala b/test/scaladoc/resources/implicit-inheritance-usecase.scala deleted file mode 100644 index 8dd1262e4b..0000000000 --- a/test/scaladoc/resources/implicit-inheritance-usecase.scala +++ /dev/null @@ -1,57 +0,0 @@ -// This tests the implicit comment inheritance capabilities of scaladoc for usecases (no $super, no @inheritdoc) -/** Testing use case inheritance */ -class UseCaseInheritance { - /** - * The base comment. And another sentence... - * - * @param arg1 The T term comment - * @param arg2 The string comment - * @tparam T The type parameter - * @return The return comment - * - * @usecase def missing_arg[T](arg1: T): Double - * - * @usecase def missing_targ(arg1: Int, arg2: String): Double - * - * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double - * @param arg1 The overridden T term comment - * - * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double - * @tparam T The overridden type parameter comment - * - * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double - * @return The overridden return comment - * - * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double - * @param arg3 The added float comment - * - * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double - * The overridden comment. 
- */ - def function[T](implicit arg1: T, arg2: String): Double = 0.0d -} - -/** Testing the override-use case interaction */ -class UseCaseOverrideInheritance extends UseCaseInheritance { - /** - * @usecase def missing_arg[T](arg1: T): Double - * - * @usecase def missing_targ(arg1: Int, arg2: String): Double - * - * @usecase def overridden_arg1[T](implicit arg1: T, arg2: String): Double - * @param arg1 The overridden T term comment - * - * @usecase def overridden_targ[T](implicit arg1: T, arg2: String): Double - * @tparam T The overridden type parameter comment - * - * @usecase def overridden_return[T](implicit arg1: T, arg2: String): Double - * @return The overridden return comment - * - * @usecase def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double - * @param arg3 The added float comment - * - * @usecase def overridden_comment[T](implicit arg1: T, arg2: String): Double - * The overridden comment. - */ - override def function[T](implicit arg1: T, arg2: String): Double = 0.0d -} diff --git a/test/scaladoc/scala/html/HtmlFactoryTest.scala b/test/scaladoc/scala/html/HtmlFactoryTest.scala index 37aa302ac7..e2687dd510 100644 --- a/test/scaladoc/scala/html/HtmlFactoryTest.scala +++ b/test/scaladoc/scala/html/HtmlFactoryTest.scala @@ -84,7 +84,12 @@ object Test extends Properties("HtmlFactory") { val html = scala.stripSuffix(".scala") + ".html" createTemplates(scala)(html) } - + + /** + * See checkTextOnly(scalaFile: String, checks: List[String]) + */ + def checkText1(scalaFile: String, check: String, debug: Boolean = true): Boolean = checkText(scalaFile, List(check), debug) + /** * This tests the text without the markup - ex: * @@ -106,31 +111,20 @@ object Test extends Properties("HtmlFactory") { * * NOTE: Comparison is done ignoring all whitespace */ - def checkText(scalaFile: String, debug: Boolean = true)(checks: (Option[String], String, Boolean)*): Boolean = { + def checkText(scalaFile: String, checks: List[String], debug: Boolean = true): Boolean = { val htmlFile = scalaFile.stripSuffix(".scala") + ".html" - val htmlAllFiles = createTemplates(scalaFile) + val htmlText = createTemplates(scalaFile)(htmlFile).text.replace('→',' ').replaceAll("\\s+","") var result = true - for ((fileHint, check, expected) <- checks) { - // resolve the file to be checked - val fileName = fileHint match { - case Some(file) => - if (file endsWith ".html") - file - else - file + ".html" - case None => - htmlFile - } - val fileText = htmlAllFiles(fileName).text.replace('→',' ').replaceAll("\\s+","") - val checkText = check.replace('→',' ').replaceAll("\\s+","") - val checkValue = fileText.contains(checkText) == expected - if (debug && (!checkValue)) { - Console.err.println("Check failed: ") - Console.err.println("HTML: " + fileText) - Console.err.println("Check: " + checkText) - } - result &&= checkValue + for (check <- checks) { + val checkText = check.replace('→',' ').replaceAll("\\s+","") + val checkValue = htmlText.contains(checkText) + if (debug && (!checkValue)) { + Console.err.println("Check failed: ") + Console.err.println("HTML: " + htmlText) + Console.err.println("Check: " + checkText) + } + result &&= checkValue } result @@ -432,155 +426,40 @@ object Test extends Properties("HtmlFactory") { createTemplate("SI_4898.scala") true } - + property("Use cases should override their original members") = - checkText("SI_5054_q1.scala")( - (None,"""def test(): Int""", true), - (None,"""def test(implicit lost: Int): Int""", false) - ) + checkText1("SI_5054_q1.scala", """def test(): Int""") && + 
!checkText1("SI_5054_q1.scala", """def test(implicit lost: Int): Int""") + property("Use cases should keep their flags - final should not be lost") = - checkText("SI_5054_q2.scala")((None, """final def test(): Int""", true)) + checkText1("SI_5054_q2.scala", """final def test(): Int""") property("Use cases should keep their flags - implicit should not be lost") = - checkText("SI_5054_q3.scala")((None, """implicit def test(): Int""", true)) - + checkText1("SI_5054_q3.scala", """implicit def test(): Int""") + property("Use cases should keep their flags - real abstract should not be lost") = - checkText("SI_5054_q4.scala")((None, """abstract def test(): Int""", true)) + checkText1("SI_5054_q4.scala", """abstract def test(): Int""") property("Use cases should keep their flags - traits should not be affected") = - checkText("SI_5054_q5.scala")((None, """def test(): Int""", true)) + checkText1("SI_5054_q5.scala", """def test(): Int""") property("Use cases should keep their flags - traits should not be affected") = - checkText("SI_5054_q6.scala")((None, """abstract def test(): Int""", true)) + checkText1("SI_5054_q6.scala", """abstract def test(): Int""") property("Use case individual signature test") = - checkText("SI_5054_q7.scala")( - (None, """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", true), - (None, """abstract def test1(): Int [use case] This takes the implicit value in scope.""", true) - ) + checkText("SI_5054_q7.scala", List( + """abstract def test2(explicit: Int): Int [use case] This takes the explicit value passed.""", + """abstract def test1(): Int [use case] This takes the implicit value in scope.""")) property("Display correct \"Definition classes\"") = - checkText("SI_5287.scala")( - (None, - """def method(): Int + checkText1("SI_5287.scala", + """def method(): Int [use case] The usecase explanation [use case] The usecase explanation - Definition Classes SI_5287 SI_5287_B SI_5287_A""", true) - ) // the explanation appears twice, as small comment and full comment + Definition Classes SI_5287 SI_5287_B SI_5287_A""", debug=true) + // explanation appears twice, as small comment and full comment - - property("Correct comment inheritance for overriding") = - checkText("implicit-inheritance-override.scala")( - (Some("Base"), - """def function[T](arg1: T, arg2: String): Double - The base comment. - The base comment. And another sentence... - T the type of the first argument - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true), - (Some("DerivedA"), - """def function[T](arg1: T, arg2: String): Double - Overriding the comment, the params and returns comments should stay the same. - Overriding the comment, the params and returns comments should stay the same. 
- T the type of the first argument - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true), - (Some("DerivedB"), - """def function[T](arg1: T, arg2: String): Double - T the type of the first argument - arg1 The overridden T term comment - arg2 The overridden string comment - returns The return comment - """, true), - (Some("DerivedC"), - """def function[T](arg1: T, arg2: String): Double - T the type of the first argument - arg1 The T term comment - arg2 The string comment - returns The overridden return comment - """, true), - (Some("DerivedD"), - """def function[T](arg1: T, arg2: String): Double - T The overriden type parameter comment - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true) - ) - - for (useCaseFile <- List("UseCaseInheritance", "UseCaseOverrideInheritance")) { - property("Correct comment inheritance for usecases") = - checkText("implicit-inheritance-usecase.scala")( - (Some(useCaseFile), - """def missing_arg[T](arg1: T): Double - [use case] - [use case] - T The type parameter - arg1 The T term comment - returns The return comment - """, true), - (Some(useCaseFile), - """def missing_targ(arg1: Int, arg2: String): Double - [use case] - [use case] - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true), - (Some(useCaseFile), - """def overridden_arg1[T](implicit arg1: T, arg2: String): Double - [use case] - [use case] - T The type parameter - arg1 The overridden T term comment - arg2 The string comment - returns The return comment - """, true), - (Some(useCaseFile), - """def overridden_targ[T](implicit arg1: T, arg2: String): Double - [use case] - [use case] - T The overridden type parameter comment - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true), - (Some(useCaseFile), - """def overridden_return[T](implicit arg1: T, arg2: String): Double - [use case] - [use case] - T The type parameter - arg1 The T term comment - arg2 The string comment - returns The overridden return comment - """, true), - (Some(useCaseFile), - """def added_arg[T](implicit arg1: T, arg2: String, arg3: Float): Double - [use case] - [use case] - T The type parameter - arg1 The T term comment - arg2 The string comment - arg3 The added float comment - returns The return comment - """, true), - (Some(useCaseFile), - """def overridden_comment[T](implicit arg1: T, arg2: String): Double - [use case] The overridden comment. - [use case] The overridden comment. - T The type parameter - arg1 The T term comment - arg2 The string comment - returns The return comment - """, true) - ) - } - { val files = createTemplates("basic.scala") //println(files) -- cgit v1.2.3 From 28b2d0c13aba7156431076119c920c7dcee6dc77 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sun, 5 Feb 2012 22:39:08 -0800 Subject: Revert "Added a rootdoc page for the compiler API scaladoc" This reverts commit e34098b7f6e37420198fa5c7c2820d0443b46cc4. 
--- build.xml | 5 ++-- src/compiler/rootdoc.txt | 6 ----- .../scala/tools/nsc/doc/html/HtmlFactory.scala | 4 +++- .../tools/nsc/doc/html/resource/lib/rootdoc.txt | 27 ++++++++++++++++++++++ src/library/rootdoc.txt | 27 ---------------------- 5 files changed, 32 insertions(+), 37 deletions(-) delete mode 100644 src/compiler/rootdoc.txt create mode 100644 src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt delete mode 100644 src/library/rootdoc.txt diff --git a/build.xml b/build.xml index e65b66219b..57d2eed1c0 100644 --- a/build.xml +++ b/build.xml @@ -1530,7 +1530,7 @@ DOCUMENTATION docUncompilable="${src.dir}/library-aux" sourcepath="${src.dir}" classpathref="pack.classpath" - docRootContent="${src.dir}/library/rootdoc.txt"> + docRootContent="${build-docs.dir}/library/lib/rootdoc.txt"> @@ -1613,8 +1613,7 @@ DOCUMENTATION docsourceurl="https://lampsvn.epfl.ch/trac/scala/browser/scala/trunk/src/€{FILE_PATH}.scala#L1" sourcepath="${src.dir}" classpathref="pack.classpath" - srcdir="${src.dir}/compiler" - docRootContent="${src.dir}/compiler/rootdoc.txt"> + srcdir="${src.dir}/compiler"> diff --git a/src/compiler/rootdoc.txt b/src/compiler/rootdoc.txt deleted file mode 100644 index 173f604098..0000000000 --- a/src/compiler/rootdoc.txt +++ /dev/null @@ -1,6 +0,0 @@ -The Scala compiler API. - -The following resources are useful for Scala plugin/compiler development: - - [[http://www.scala-lang.org/node/215 Scala development tutorials]] on [[http://www.scala-lang.org www.scala-lang.org]] - - [[https://wiki.scala-lang.org/display/SIW/ Scala Internals wiki]] - - [[http://lampwww.epfl.ch/~magarcia/ScalaCompilerCornerReloaded/ Scala compiler corner]], maintained by Miguel diff --git a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala index 4f05678d85..c21507ef45 100644 --- a/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala +++ b/src/compiler/scala/tools/nsc/doc/html/HtmlFactory.scala @@ -80,7 +80,9 @@ class HtmlFactory(val universe: doc.Universe, index: doc.Index) { "selected.png", "selected2-right.png", "selected2.png", - "unselected.png" + "unselected.png", + + "rootdoc.txt" ) /** Generates the Scaladoc site for a model into the site root. diff --git a/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt b/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt new file mode 100644 index 0000000000..6145429f1e --- /dev/null +++ b/src/compiler/scala/tools/nsc/doc/html/resource/lib/rootdoc.txt @@ -0,0 +1,27 @@ +This is the documentation for the Scala standard library. + +== Package structure == + +The [[scala]] package contains core types. + +scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation. + +Other important packages include: + + - scala.[[scala.actors]] - Concurrency framework inspired by Erlang. + - scala.[[scala.io]] - Input and output. + - scala.[[scala.math]] - Basic math functions and additional numeric types. + - scala.[[scala.sys]] - Interaction with other processes and the operating system. + - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions. + - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing. 
+ - scala.[[scala.xml]] - XML parsing, manipulation, and serialization. + +Many other packages exist. See the complete list on the left. + +== Automatic imports == + +Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default. + +Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]]. + +Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String. diff --git a/src/library/rootdoc.txt b/src/library/rootdoc.txt deleted file mode 100644 index 6145429f1e..0000000000 --- a/src/library/rootdoc.txt +++ /dev/null @@ -1,27 +0,0 @@ -This is the documentation for the Scala standard library. - -== Package structure == - -The [[scala]] package contains core types. - -scala.[[scala.collection]] and its subpackages contain a collections framework with higher-order functions for manipulation. Both [[scala.collection.immutable]] and [[scala.collection.mutable]] data structures are available, with immutable as the default. The [[scala.collection.parallel]] collections provide automatic parallel operation. - -Other important packages include: - - - scala.[[scala.actors]] - Concurrency framework inspired by Erlang. - - scala.[[scala.io]] - Input and output. - - scala.[[scala.math]] - Basic math functions and additional numeric types. - - scala.[[scala.sys]] - Interaction with other processes and the operating system. - - scala.util.[[scala.util.matching]] - Pattern matching in text using regular expressions. - - scala.util.parsing.[[scala.util.parsing.combinator]] - Composable combinators for parsing. - - scala.[[scala.xml]] - XML parsing, manipulation, and serialization. - -Many other packages exist. See the complete list on the left. - -== Automatic imports == - -Identifiers in the scala package and the [[scala.Predef]] object are always in scope by default. - -Some of these identifiers are type aliases provided as shortcuts to commonly used classes. For example, List is an alias for scala.collection.immutable.[[scala.collection.immutable.List]]. - -Other aliases refer to classes providing by the underlying platform. For example, on the JVM, String is an alias for java.lang.String. 
-- cgit v1.2.3 From 0c73665be747937a80a8178268782a470f0b3413 Mon Sep 17 00:00:00 2001 From: Geoff Reedy Date: Mon, 6 Feb 2012 09:05:32 -0700 Subject: Automatically fetch jars as needed Use mappers with uptodate and touch tasks to detect if any jars need to be downloaded based on the modification time of the desired.sha1 files --- build.xml | 33 +++++++++++++++++++-------------- 1 file changed, 19 insertions(+), 14 deletions(-) diff --git a/build.xml b/build.xml index 57d2eed1c0..d5a35bd7e7 100644 --- a/build.xml +++ b/build.xml @@ -210,28 +210,33 @@ PROPERTIES INITIALISATION ============================================================================ --> - - - - - - - + + + + + + + + + - + - - + + + + + + + + - + + + - - - - + + -- cgit v1.2.3 From adb218d8d230b4713942f5b220d8cd0602995aae Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Mon, 13 Feb 2012 22:04:54 +0100 Subject: Fixes https://issues.scala-lang.org/browse/SI-5229 --- .../scala/reflect/internal/Importers.scala | 108 ++++++++++++--------- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 11 ++- .../scala/reflect/runtime/TreeBuildUtil.scala | 20 +++- src/compiler/scala/reflect/runtime/Universe.scala | 2 +- src/compiler/scala/tools/nsc/ast/Reifiers.scala | 10 ++ src/compiler/scala/tools/nsc/ast/Trees.scala | 30 ++++-- test/files/run/reify_inner1.check | 1 + test/files/run/reify_inner1.scala | 22 +++++ test/files/run/reify_inner2.check | 1 + test/files/run/reify_inner2.scala | 22 +++++ test/files/run/reify_inner3.check | 1 + test/files/run/reify_inner3.scala | 22 +++++ test/files/run/reify_inner4.check | 1 + test/files/run/reify_inner4.scala | 22 +++++ test/files/run/t5229_1.check | 0 test/files/run/t5229_1.scala | 14 +++ test/files/run/t5229_2.check | 2 + test/files/run/t5229_2.scala | 19 ++++ test/files/run/t5270.check | 1 + test/files/run/t5270.scala | 26 +++++ test/pending/run/t5229_1.check | 0 test/pending/run/t5229_1.scala | 14 --- test/pending/run/t5229_1_nolift.scala | 1 - test/pending/run/t5229_2.check | 2 - test/pending/run/t5229_2.scala | 19 ---- test/pending/run/t5270.check | 1 - test/pending/run/t5270.scala | 26 ----- 27 files changed, 273 insertions(+), 125 deletions(-) create mode 100644 test/files/run/reify_inner1.check create mode 100644 test/files/run/reify_inner1.scala create mode 100644 test/files/run/reify_inner2.check create mode 100644 test/files/run/reify_inner2.scala create mode 100644 test/files/run/reify_inner3.check create mode 100644 test/files/run/reify_inner3.scala create mode 100644 test/files/run/reify_inner4.check create mode 100644 test/files/run/reify_inner4.scala create mode 100644 test/files/run/t5229_1.check create mode 100644 test/files/run/t5229_1.scala create mode 100644 test/files/run/t5229_2.check create mode 100644 test/files/run/t5229_2.scala create mode 100644 test/files/run/t5270.check create mode 100644 test/files/run/t5270.scala delete mode 100644 test/pending/run/t5229_1.check delete mode 100644 test/pending/run/t5229_1.scala delete mode 100644 test/pending/run/t5229_1_nolift.scala delete mode 100644 test/pending/run/t5229_2.check delete mode 100644 test/pending/run/t5229_2.scala delete mode 100644 test/pending/run/t5270.check delete mode 100644 test/pending/run/t5270.scala diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index 63efaede07..c232e3b7c1 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -32,8 +32,11 @@ trait Importers 
{ self: SymbolTable => def importPosition(pos: from.Position): Position = NoPosition - def importSymbol(sym: from.Symbol): Symbol = { + def importSymbol(sym0: from.Symbol): Symbol = { def doImport(sym: from.Symbol): Symbol = { + if (symMap.contains(sym)) + return symMap(sym) + val myowner = importSymbol(sym.owner) val mypos = importPosition(sym.pos) val myname = importName(sym.name).toTermName @@ -47,7 +50,7 @@ trait Importers { self: SymbolTable => case x: from.MethodSymbol => linkReferenced(myowner.newMethod(myname, mypos, myflags), x, importSymbol) case x: from.ModuleSymbol => - linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, doImport) + linkReferenced(myowner.newModuleSymbol(myname, mypos, myflags), x, importSymbol) case x: from.FreeVar => newFreeVar(importName(x.name).toTermName, importType(x.tpe), x.value, myflags) case x: from.TermSymbol => @@ -59,14 +62,14 @@ trait Importers { self: SymbolTable => case y: from.Symbol => importSymbol(y) } myowner.newTypeSkolemSymbol(myname.toTypeName, origin, mypos, myflags) - /* - case x: from.ModuleClassSymbol => - val mysym = new ModuleClassSymbol(myowner, mypos, myname.toTypeName) - mysym.sourceModule = importSymbol(x.sourceModule) - mysym -*/ + case x: from.ModuleClassSymbol => + val mysym = myowner.newModuleClassSymbol(myname.toTypeName, mypos, myflags) + symMap(x) = mysym + mysym.sourceModule = importSymbol(x.sourceModule) + mysym case x: from.ClassSymbol => val mysym = myowner.newClassSymbol(myname.toTypeName, mypos, myflags) + symMap(x) = mysym if (sym.thisSym != sym) { mysym.typeOfThis = importType(sym.typeOfThis) mysym.thisSym.name = importName(sym.thisSym.name) @@ -78,7 +81,7 @@ trait Importers { self: SymbolTable => symMap(sym) = mysym mysym setFlag Flags.LOCKED mysym setInfo { - val mytypeParams = sym.typeParams map doImport + val mytypeParams = sym.typeParams map importSymbol new LazyPolyType(mytypeParams) { override def complete(s: Symbol) { val result = sym.info match { @@ -94,6 +97,7 @@ trait Importers { self: SymbolTable => } // end doImport def importOrRelink: Symbol = { + val sym = sym0 // makes sym visible in the debugger if (sym == null) null else if (sym == from.NoSymbol) @@ -101,51 +105,61 @@ trait Importers { self: SymbolTable => else if (sym.isRoot) definitions.RootClass else { - val myowner = importSymbol(sym.owner) - val myname = importName(sym.name) - if (sym.isModuleClass) { - assert(sym.sourceModule != NoSymbol, sym) - val mymodule = importSymbol(sym.sourceModule) - assert(mymodule != NoSymbol, sym) - assert(mymodule.moduleClass != NoSymbol, mymodule) - mymodule.moduleClass - } else if (myowner.isClass && !myowner.isRefinementClass && !(myowner hasFlag Flags.LOCKED) && sym.owner.info.decl(sym.name).exists) { - // symbol is in class scope, try to find equivalent one in local scope - if (sym.isOverloaded) - myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol) - else { - var existing: Symbol = myowner.info.decl(myname) - if (existing.isOverloaded) { - existing = - if (sym.isMethod) { - val localCopy = doImport(sym) - existing filter (_.tpe matches localCopy.tpe) - } else { - existing filter (!_.isMethod) - } - assert(!existing.isOverloaded, - "import failure: cannot determine unique overloaded method alternative from\n "+ - (existing.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe) + val name = sym.name + val owner = sym.owner + var scope = if (owner.isClass && !owner.isRefinementClass) owner.info else from.NoType + var existing = scope.decl(name) 
+ if (sym.isPackageClass || sym.isModuleClass) existing = existing.moduleClass + if (!existing.exists) scope = from.NoType + + val myname = importName(name) + val myowner = importSymbol(owner) + val myscope = if (scope != from.NoType && !(myowner hasFlag Flags.LOCKED)) myowner.info else NoType + var myexisting = if (myscope != NoType) myowner.info.decl(myname) else NoSymbol // cannot load myexisting in general case, because it creates cycles for methods + if (sym.isPackageClass || sym.isModuleClass) myexisting = importSymbol(sym.sourceModule).moduleClass + if (!sym.isOverloaded && myexisting.isOverloaded) { + myexisting = + if (sym.isMethod) { + val localCopy = doImport(sym) + myexisting filter (_.tpe matches localCopy.tpe) + } else { + myexisting filter (!_.isMethod) } - if (existing != NoSymbol) existing - else { + assert(!myexisting.isOverloaded, + "import failure: cannot determine unique overloaded method alternative from\n "+ + (myexisting.alternatives map (_.defString) mkString "\n")+"\n that matches "+sym+":"+sym.tpe) + } + + val mysym = { + if (sym.isOverloaded) { + myowner.newOverloaded(myowner.thisType, sym.alternatives map importSymbol) + } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) { + assert(myowner.typeParams.length > sym.paramPos, + "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+ + myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+ + sym.owner+from.typeParamsString(sym.owner.info)) + myowner.typeParams(sym.paramPos) + } else { + if (myexisting != NoSymbol) { + myexisting + } else { val mysym = doImport(sym) - assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+existing) - myowner.info.decls enter mysym + + if (myscope != NoType) { + assert(myowner.info.decls.lookup(myname) == NoSymbol, myname+" "+myowner.info.decl(myname)+" "+myexisting) + myowner.info.decls enter mysym + } + mysym } } - } else if (sym.isTypeParameter && sym.paramPos >= 0 && !(myowner hasFlag Flags.LOCKED)) { - assert(myowner.typeParams.length > sym.paramPos, - "import failure: cannot determine parameter "+sym+" (#"+sym.paramPos+") in "+ - myowner+typeParamsString(myowner.rawInfo)+"\n original symbol was: "+ - sym.owner+from.typeParamsString(sym.owner.info)) - myowner.typeParams(sym.paramPos) - } else - doImport(sym) + } + + mysym } } // end importOrRelink + val sym = sym0 if (symMap contains sym) { symMap(sym) } else { @@ -410,4 +424,4 @@ trait Importers { self: SymbolTable => case _ => constant.value }) } -} +} \ No newline at end of file diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index c09022e535..880c68eaa0 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -44,11 +44,11 @@ trait ToolBoxes extends { self: Universe => // !!! Why is this is in the empty package? If it's only to make // it inaccessible then please put it somewhere designed for that // rather than polluting the empty package with synthetics. 
- trace("typing: ")(showAttributed(tree)) + trace("typing: ")(showAttributed(tree, true, true, settings.Yshowsymkinds.value)) val ownerClass = EmptyPackageClass.newClassWithInfo(newTypeName(""), List(ObjectClass.tpe), newScope) val owner = ownerClass.newLocalDummy(tree.pos) val ttree = typer.atOwner(tree, owner).typed(tree, analyzer.EXPRmode, pt) - trace("typed: ")(showAttributed(ttree)) + trace("typed: ")(showAttributed(ttree, true, true, settings.Yshowsymkinds.value)) ttree } @@ -78,9 +78,9 @@ trait ToolBoxes extends { self: Universe => List(List()), List(methdef), NoPosition)) - trace("wrapped: ")(showAttributed(moduledef)) + trace("wrapped: ")(showAttributed(moduledef, true, true, settings.Yshowsymkinds.value)) val cleanedUp = resetLocalAttrs(moduledef) - trace("cleaned up: ")(showAttributed(cleanedUp)) + trace("cleaned up: ")(showAttributed(cleanedUp, true, true, settings.Yshowsymkinds.value)) cleanedUp } @@ -192,7 +192,8 @@ trait ToolBoxes extends { self: Universe => def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = { if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType) val ttree = importAndTypeCheck(tree, expectedType) - exporter.importTree(ttree).asInstanceOf[rm.Tree] + val ettree = exporter.importTree(ttree).asInstanceOf[rm.Tree] + ettree } def typeCheck(tree: rm.Tree): rm.Tree = diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala index 275c85f332..0b54843344 100644 --- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala +++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala @@ -2,9 +2,23 @@ package scala.reflect package runtime trait TreeBuildUtil extends Universe with api.TreeBuildUtil { - def staticClass(fullname: String): Symbol = definitions.getRequiredClass(fullname) - def staticModule(fullname: String): Symbol = definitions.getRequiredModule(fullname) - def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.thisType + def staticClass(fullname: String): Symbol = { + val sym = definitions.getRequiredClass(fullname) + sym.initialize + sym + } + + def staticModule(fullname: String): Symbol = { + val sym = definitions.getRequiredModule(fullname) + sym.initialize + sym + } + + def thisModuleType(fullname: String) = { + val sym = staticModule(fullname).moduleClass + sym.initialize + sym.thisType + } /** Selects type symbol with given name from the defined members of prefix type */ diff --git a/src/compiler/scala/reflect/runtime/Universe.scala b/src/compiler/scala/reflect/runtime/Universe.scala index c786bb86c5..700f819226 100644 --- a/src/compiler/scala/reflect/runtime/Universe.scala +++ b/src/compiler/scala/reflect/runtime/Universe.scala @@ -16,7 +16,7 @@ class Universe extends SymbolTable { val gen = new TreeGen { val global: Universe.this.type = Universe.this } - def settings = new Settings + lazy val settings = new Settings def forInteractive = false def forScaladoc = false diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala index b82d78b786..91d5d2bf4a 100644 --- a/src/compiler/scala/tools/nsc/ast/Reifiers.scala +++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala @@ -325,6 +325,16 @@ trait Reifiers { self: Global => // registerReifiableSymbol(tree.symbol) boundSyms += tree.symbol + if (tree.symbol.sourceModule != NoSymbol) { + if (reifyDebug) println("boundSym (sourceModule): " + tree.symbol.sourceModule) + boundSyms += tree.symbol.sourceModule + } + + if 
(tree.symbol.moduleClass != NoSymbol) { + if (reifyDebug) println("boundSym (moduleClass): " + tree.symbol.moduleClass) + boundSyms += tree.symbol.moduleClass + } + val prefix = tree.productPrefix val elements = (tree.productIterator map { // annotations exist in two flavors: diff --git a/src/compiler/scala/tools/nsc/ast/Trees.scala b/src/compiler/scala/tools/nsc/ast/Trees.scala index 9e304a0eb5..855b55bb5e 100644 --- a/src/compiler/scala/tools/nsc/ast/Trees.scala +++ b/src/compiler/scala/tools/nsc/ast/Trees.scala @@ -251,12 +251,27 @@ trait Trees extends reflect.internal.Trees { self: Global => * (bq:) This transformer has mutable state and should be discarded after use */ private class ResetAttrs(localOnly: Boolean) { + val debug = settings.debug.value + val trace = scala.tools.nsc.util.trace when debug + val locals = util.HashSet[Symbol](8) + val orderedLocals = collection.mutable.ListBuffer[Symbol]() + def registerLocal(sym: Symbol) { + if (sym != null && sym != NoSymbol) { + if (debug && !(locals contains sym)) orderedLocals append sym + locals addEntry sym + } + } class MarkLocals extends self.Traverser { - def markLocal(tree: Tree) = - if (tree.symbol != null && tree.symbol != NoSymbol) - locals addEntry tree.symbol + def markLocal(tree: Tree) { + if (tree.symbol != null && tree.symbol != NoSymbol) { + val sym = tree.symbol + registerLocal(sym) + registerLocal(sym.sourceModule) + registerLocal(sym.moduleClass) + } + } override def traverse(tree: Tree) = { tree match { @@ -301,9 +316,12 @@ trait Trees extends reflect.internal.Trees { self: Global => def transform[T <: Tree](x: T): T = { new MarkLocals().traverse(x) - val trace = scala.tools.nsc.util.trace when settings.debug.value - val eoln = System.getProperty("line.separator") - trace("locals (%d total): %n".format(locals.size))(locals.toList map {" " + _} mkString eoln) + if (debug) { + assert(locals.size == orderedLocals.size) + val eoln = System.getProperty("line.separator") + val msg = orderedLocals.toList filter {_ != NoSymbol} map {" " + _} mkString eoln + trace("locals (%d total): %n".format(orderedLocals.size))(msg) + } val x1 = new Transformer().transform(x) assert(x.getClass isInstance x1) diff --git a/test/files/run/reify_inner1.check b/test/files/run/reify_inner1.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/reify_inner1.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/reify_inner1.scala b/test/files/run/reify_inner1.scala new file mode 100644 index 0000000000..69931198e0 --- /dev/null +++ b/test/files/run/reify_inner1.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { + class D { + val x = 2 + } + } + + val outer = new C() + val inner = new outer.D() + println(inner.x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_inner2.check b/test/files/run/reify_inner2.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/reify_inner2.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/reify_inner2.scala b/test/files/run/reify_inner2.scala new file mode 100644 index 0000000000..0f12fd472a --- /dev/null +++ b/test/files/run/reify_inner2.scala @@ -0,0 +1,22 @@ +import 
scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { + object D { + val x = 2 + } + } + + val outer = new C() + val inner = outer.D + println(inner.x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_inner3.check b/test/files/run/reify_inner3.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/reify_inner3.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/reify_inner3.scala b/test/files/run/reify_inner3.scala new file mode 100644 index 0000000000..6b97b42b34 --- /dev/null +++ b/test/files/run/reify_inner3.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C { + class D { + val x = 2 + } + } + + val outer = C + val inner = new outer.D + println(inner.x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/reify_inner4.check b/test/files/run/reify_inner4.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/reify_inner4.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/reify_inner4.scala b/test/files/run/reify_inner4.scala new file mode 100644 index 0000000000..de8c973b09 --- /dev/null +++ b/test/files/run/reify_inner4.scala @@ -0,0 +1,22 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C { + object D { + val x = 2 + } + } + + val outer = C + val inner = outer.D + println(inner.x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5229_1.check b/test/files/run/t5229_1.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/t5229_1.scala b/test/files/run/t5229_1.scala new file mode 100644 index 0000000000..1d7bf0590b --- /dev/null +++ b/test/files/run/t5229_1.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5229_2.check b/test/files/run/t5229_2.check new file mode 100644 index 0000000000..5db6ec9b38 --- /dev/null +++ b/test/files/run/t5229_2.check @@ -0,0 +1,2 @@ +2 +evaluated = null diff --git a/test/files/run/t5229_2.scala b/test/files/run/t5229_2.scala new file mode 100644 index 0000000000..67be7328a6 --- /dev/null +++ b/test/files/run/t5229_2.scala @@ -0,0 +1,19 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C { + val x = 2 + } + + println(C.x) + }; + + val reporter = new ConsoleReporter(new Settings) 
+ val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + val evaluated = toolbox.runExpr(ttree) + println("evaluated = " + evaluated) +} diff --git a/test/files/run/t5270.check b/test/files/run/t5270.check new file mode 100644 index 0000000000..08839f6bb2 --- /dev/null +++ b/test/files/run/t5270.check @@ -0,0 +1 @@ +200 diff --git a/test/files/run/t5270.scala b/test/files/run/t5270.scala new file mode 100644 index 0000000000..10f79790b0 --- /dev/null +++ b/test/files/run/t5270.scala @@ -0,0 +1,26 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class Y { + def y = 100 + } + + trait Z { this: Y => + val z = 2 * y + } + + class X extends Y with Z { + def println() = Predef.println(z) + } + + new X().println() + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/pending/run/t5229_1.check b/test/pending/run/t5229_1.check deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/test/pending/run/t5229_1.scala b/test/pending/run/t5229_1.scala deleted file mode 100644 index 1d7bf0590b..0000000000 --- a/test/pending/run/t5229_1.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - object C - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5229_1_nolift.scala b/test/pending/run/t5229_1_nolift.scala deleted file mode 100644 index 33855d2e4f..0000000000 --- a/test/pending/run/t5229_1_nolift.scala +++ /dev/null @@ -1 +0,0 @@ -object C diff --git a/test/pending/run/t5229_2.check b/test/pending/run/t5229_2.check deleted file mode 100644 index 5db6ec9b38..0000000000 --- a/test/pending/run/t5229_2.check +++ /dev/null @@ -1,2 +0,0 @@ -2 -evaluated = null diff --git a/test/pending/run/t5229_2.scala b/test/pending/run/t5229_2.scala deleted file mode 100644 index 67be7328a6..0000000000 --- a/test/pending/run/t5229_2.scala +++ /dev/null @@ -1,19 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - object C { - val x = 2 - } - - println(C.x) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) - println("evaluated = " + evaluated) -} diff --git a/test/pending/run/t5270.check b/test/pending/run/t5270.check deleted file mode 100644 index 08839f6bb2..0000000000 --- a/test/pending/run/t5270.check +++ /dev/null @@ -1 +0,0 @@ -200 diff --git a/test/pending/run/t5270.scala b/test/pending/run/t5270.scala deleted file mode 100644 index 10f79790b0..0000000000 --- a/test/pending/run/t5270.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class Y { - def y = 100 - } - - trait Z { this: Y => - val z = 2 * y - } - - class X extends Y with Z { - def println() = Predef.println(z) - } - - new X().println() - }; - - val 
reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} -- cgit v1.2.3 From 92fc4e351300e927ae1a8b0a6c383d00e3968c5d Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 09:34:03 -0800 Subject: Existential printing, plus more compiler testing infrastructure. Direct compiler internals testing. It's really easy, you should probably use it about 1000 times each. Look at the test: run/existentials-in-compiler.scala The checkfile contains the (string representations of the) actual existentials from the compiler to make sure they correspond properly to the ones in the source. Existentials were being printed with wildcards too freely; this has been tightened up. --- .../scala/reflect/internal/Definitions.scala | 28 ++-- .../scala/reflect/internal/SymbolTable.scala | 8 +- src/compiler/scala/reflect/internal/Types.scala | 65 ++++++--- src/compiler/scala/tools/nsc/Global.scala | 8 +- .../scala/tools/nsc/transform/Erasure.scala | 24 ++-- .../tools/nsc/typechecker/SyntheticMethods.scala | 4 +- src/partest/scala/tools/partest/CompilerTest.scala | 27 ++++ src/partest/scala/tools/partest/DirectTest.scala | 32 ++++- test/files/run/existentials-in-compiler.check | 156 +++++++++++++++++++++ test/files/run/existentials-in-compiler.scala | 83 +++++++++++ 10 files changed, 385 insertions(+), 50 deletions(-) create mode 100644 src/partest/scala/tools/partest/CompilerTest.scala create mode 100644 test/files/run/existentials-in-compiler.check create mode 100644 test/files/run/existentials-in-compiler.scala diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index e05ac1087b..5b2c61701d 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -586,14 +586,6 @@ trait Definitions extends reflect.api.StandardDefinitions { case _ => NoType } - /** To avoid unchecked warnings on polymorphic classes, translate - * a Foo[T] into a Foo[_] for use in the pattern matcher. - */ - def typeCaseType(clazz: Symbol) = clazz.tpe.normalize match { - case TypeRef(_, sym, args) if args.nonEmpty => newExistentialType(sym.typeParams, clazz.tpe) - case tp => tp - } - def seqType(arg: Type) = appliedType(SeqClass.typeConstructor, List(arg)) def arrayType(arg: Type) = appliedType(ArrayClass.typeConstructor, List(arg)) def byNameType(arg: Type) = appliedType(ByNameParamClass.typeConstructor, List(arg)) @@ -609,6 +601,26 @@ trait Definitions extends reflect.api.StandardDefinitions { def vmClassType(arg: Type): Type = ClassType(arg) def vmSignature(sym: Symbol, info: Type): String = signature(info) // !!! + /** Given a class symbol C with type parameters T1, T2, ... Tn + * which have upper/lower bounds LB1/UB1, LB1/UB2, ..., LBn/UBn, + * returns an existential type of the form + * + * C[E1, ..., En] forSome { E1 >: LB1 <: UB1 ... en >: LBn <: UBn }. + */ + def classExistentialType(clazz: Symbol): Type = + newExistentialType(clazz.typeParams, clazz.tpe) + + /** Given type U, creates a Type representing Class[_ <: U]. + */ + def boundedClassType(upperBound: Type) = + appliedTypeAsUpperBounds(ClassClass.typeConstructor, List(upperBound)) + + /** To avoid unchecked warnings on polymorphic classes, translate + * a Foo[T] into a Foo[_] for use in the pattern matcher. 
+ */ + @deprecated("Use classExistentialType", "2.10.0") + def typeCaseType(clazz: Symbol): Type = classExistentialType(clazz) + // // .NET backend // diff --git a/src/compiler/scala/reflect/internal/SymbolTable.scala b/src/compiler/scala/reflect/internal/SymbolTable.scala index fb827b0658..1973a97279 100644 --- a/src/compiler/scala/reflect/internal/SymbolTable.scala +++ b/src/compiler/scala/reflect/internal/SymbolTable.scala @@ -120,9 +120,11 @@ abstract class SymbolTable extends api.Universe try op finally phase = current } - - @inline final def afterPhase[T](ph: Phase)(op: => T): T = - atPhase(ph.next)(op) + /** Since when it is to be "at" a phase is inherently ambiguous, + * a couple unambiguously named methods. + */ + @inline final def beforePhase[T](ph: Phase)(op: => T): T = atPhase(ph)(op) + @inline final def afterPhase[T](ph: Phase)(op: => T): T = atPhase(ph.next)(op) @inline final def atPhaseNotLaterThan[T](target: Phase)(op: => T): T = if (target != NoPhase && phase.id > target.id) atPhase(target)(op) else op diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index cd44b700c1..6295c089b2 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -2433,25 +2433,37 @@ trait Types extends api.Types { self: SymbolTable => case _ => List() } - + /** An existential can only be printed with wildcards if: + * - the underlying type is a typeref + * - where there is a 1-to-1 correspondence between underlying's typeargs and quantified + * - and none of the existential parameters is referenced from anywhere else in the type + * - and none of the existential parameters are singleton types + */ + private def isRepresentableWithWildcards = !settings.debug.value && { + val qset = quantified.toSet + !qset.exists(_.isSingletonExistential) && (underlying match { + case TypeRef(_, sym, args) => + sameLength(args, quantified) && { + args forall { arg => + qset(arg.typeSymbol) && !qset.exists(arg.typeSymbol.info.bounds contains _) + } + } + case _ => false + }) + } override def safeToString: String = { - if (!(quantified exists (_.isSingletonExistential)) && !settings.debug.value) - // try to represent with wildcards first - underlying match { - case TypeRef(pre, sym, args) if args.nonEmpty => - val wargs = wildcardArgsString(quantified.toSet, args) - if (sameLength(wargs, args)) - return TypeRef(pre, sym, List()) + wargs.mkString("[", ", ", "]") - case _ => - } - var ustr = underlying.toString + def clauses = { + val str = quantified map (_.existentialToString) mkString (" forSome { ", "; ", " }") + if (settings.explaintypes.value) "(" + str + ")" else str + } underlying match { - case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => ustr = "("+ustr+")" + case TypeRef(pre, sym, args) if isRepresentableWithWildcards => + "" + TypeRef(pre, sym, Nil) + wildcardArgsString(quantified.toSet, args).mkString("[", ", ", "]") + case MethodType(_, _) | NullaryMethodType(_) | PolyType(_, _) => + "(" + underlying + ")" + clauses case _ => + "" + underlying + clauses } - val str = - ustr+(quantified map (_.existentialToString) mkString(" forSome { ", "; ", " }")) - if (settings.explaintypes.value) "("+str+")" else str } override def cloneInfo(owner: Symbol) = @@ -3260,6 +3272,25 @@ trait Types extends api.Types { self: SymbolTable => case WildcardType => tycon // needed for neg/t0226 case _ => abort(debugString(tycon)) } + + /** A creator for existential types where the type arguments, 
+ * rather than being applied directly, are interpreted as the + * upper bounds of unknown types. For instance if the type argument + * list given is List(AnyRefClass), the resulting type would be + * e.g. Set[_ <: AnyRef] rather than Set[AnyRef] . + */ + def appliedTypeAsUpperBounds(tycon: Type, args: List[Type]): Type = { + tycon match { + case TypeRef(pre, sym, _) if sameLength(sym.typeParams, args) => + val eparams = typeParamsToExistentials(sym) + val bounds = args map (TypeBounds upper _) + (eparams, bounds).zipped foreach (_ setInfo _) + + newExistentialType(eparams, typeRef(pre, sym, eparams map (_.tpe))) + case _ => + appliedType(tycon, args) + } + } /** A creator for type parameterizations that strips empty type parameter lists. * Use this factory method to indicate the type has kind * (it's a polymorphic value) @@ -3845,6 +3876,8 @@ trait Types extends api.Types { self: SymbolTable => eparams map (_ substInfo (tparams, eparams)) } + def typeParamsToExistentials(clazz: Symbol): List[Symbol] = + typeParamsToExistentials(clazz, clazz.typeParams) // note: it's important to write the two tests in this order, // as only typeParams forces the classfile to be read. See #400 @@ -3876,7 +3909,7 @@ trait Types extends api.Types { self: SymbolTable => if (expanded contains sym) AnyRefClass.tpe else try { expanded += sym - val eparams = mapOver(typeParamsToExistentials(sym, sym.typeParams)) + val eparams = mapOver(typeParamsToExistentials(sym)) existentialAbstraction(eparams, typeRef(apply(pre), sym, eparams map (_.tpe))) } finally { expanded -= sym diff --git a/src/compiler/scala/tools/nsc/Global.scala b/src/compiler/scala/tools/nsc/Global.scala index 18735cafe2..4493188b31 100644 --- a/src/compiler/scala/tools/nsc/Global.scala +++ b/src/compiler/scala/tools/nsc/Global.scala @@ -807,6 +807,10 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb def currentRun: Run = curRun def currentUnit: CompilationUnit = if (currentRun eq null) NoCompilationUnit else currentRun.currentUnit def currentSource: SourceFile = if (currentUnit.exists) currentUnit.source else lastSeenSourceFile + + @inline final def afterTyper[T](op: => T): T = afterPhase(currentRun.typerPhase)(op) + @inline final def beforeErasure[T](op: => T): T = beforePhase(currentRun.erasurePhase)(op) + @inline final def afterErasure[T](op: => T): T = afterPhase(currentRun.erasurePhase)(op) /** Don't want to introduce new errors trying to report errors, * so swallow exceptions. 
@@ -1114,7 +1118,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb lazy val trackers = currentRun.units.toList map (x => SymbolTracker(x)) def snapshot() = { inform("\n[[symbol layout at end of " + phase + "]]") - atPhase(phase.next) { + afterPhase(phase) { trackers foreach { t => t.snapshot() inform(t.show("Heading from " + phase.prev.name + " to " + phase.name)) @@ -1389,7 +1393,7 @@ class Global(var currentSettings: Settings, var reporter: Reporter) extends Symb def printAllUnits() { print("[[syntax trees at end of " + phase + "]]") - atPhase(phase.next) { currentRun.units foreach (treePrinter.print(_)) } + afterPhase(phase) { currentRun.units foreach (treePrinter.print(_)) } } private def findMemberFromRoot(fullName: Name): Symbol = { diff --git a/src/compiler/scala/tools/nsc/transform/Erasure.scala b/src/compiler/scala/tools/nsc/transform/Erasure.scala index fe479a5375..5f84d765b9 100644 --- a/src/compiler/scala/tools/nsc/transform/Erasure.scala +++ b/src/compiler/scala/tools/nsc/transform/Erasure.scala @@ -44,22 +44,16 @@ abstract class Erasure extends AddInterfaces // class object is that of java.lang.Integer, not Int. // // TODO: If T is final, return type could be Class[T]. Should it? - def getClassReturnType(tp: Type): Type = { - val sym = tp.typeSymbol - - if (phase.erasedTypes) ClassClass.tpe - else if (isValueClass(sym)) ClassType(tp.widen) - else { - val eparams = typeParamsToExistentials(ClassClass, ClassClass.typeParams) - val upperBound = ( - if (isPhantomClass(sym)) AnyClass.tpe + def getClassReturnType(tpe: Type): Type = { + if (phase.erasedTypes) ClassClass.tpe else { + val tp = tpe.widen.normalize + val sym = tp.typeSymbol + + if (isValueClass(sym)) ClassType(tp) + else boundedClassType( + if (isPhantomClass(sym)) ObjectClass.tpe else if (sym.isLocalClass) intersectionDominator(tp.parents) - else tp.widen - ) - - existentialAbstraction( - eparams, - ClassType(eparams.head setInfo TypeBounds.upper(upperBound) tpe) + else tp ) } } diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 4ea21b1c44..cf90577959 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -116,7 +116,7 @@ trait SyntheticMethods extends ast.TreeDSL { */ def canEqualMethod: Tree = ( createMethod(nme.canEqual_, List(AnyClass.tpe), BooleanClass.tpe)(m => - Ident(m.firstParam) IS_OBJ typeCaseType(clazz)) + Ident(m.firstParam) IS_OBJ classExistentialType(clazz)) ) /** The equality method for case classes. @@ -132,7 +132,7 @@ trait SyntheticMethods extends ast.TreeDSL { */ def equalsClassMethod: Tree = createMethod(nme.equals_, List(AnyClass.tpe), BooleanClass.tpe) { m => val arg0 = Ident(m.firstParam) - val thatTest = gen.mkIsInstanceOf(arg0, typeCaseType(clazz), true, false) + val thatTest = gen.mkIsInstanceOf(arg0, classExistentialType(clazz), true, false) val thatCast = gen.mkCast(arg0, clazz.tpe) def argsBody: Tree = { diff --git a/src/partest/scala/tools/partest/CompilerTest.scala b/src/partest/scala/tools/partest/CompilerTest.scala new file mode 100644 index 0000000000..dd06c051a4 --- /dev/null +++ b/src/partest/scala/tools/partest/CompilerTest.scala @@ -0,0 +1,27 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.partest + +import scala.tools.nsc._ + +/** For testing compiler internals directly. 
+ * Each source code string in "sources" will be compiled, and + * the check function will be called with the source code and the + * resulting CompilationUnit. The check implementation should + * test for what it wants to test and fail (via assert or other + * exception) if it is not happy. + */ +abstract class CompilerTest extends DirectTest { + def check(source: String, unit: global.CompilationUnit): Unit + + lazy val global: Global = newCompiler() + lazy val units = compilationUnits(global)(sources: _ *) + + override def extraSettings = "-usejavacp -d " + testOutput.path + + def sources: List[String] = List(code) + def show() = (sources, units).zipped foreach check +} diff --git a/src/partest/scala/tools/partest/DirectTest.scala b/src/partest/scala/tools/partest/DirectTest.scala index be8cac9147..74f511aa4e 100644 --- a/src/partest/scala/tools/partest/DirectTest.scala +++ b/src/partest/scala/tools/partest/DirectTest.scala @@ -35,13 +35,37 @@ abstract class DirectTest extends App { s processArguments (allArgs, true) s } - // compile the code, optionally first adding to the settings - def compile(args: String*) = { + // new compiler + def newCompiler(args: String*): Global = { val settings = newSettings((CommandLineParser tokenize extraSettings) ++ args.toList) - val global = new Global(settings) - new global.Run compileSources List(new BatchSourceFile("", code)) + new Global(settings) + } + def newSources(sourceCodes: String*) = sourceCodes.toList.zipWithIndex map { + case (src, idx) => new BatchSourceFile("newSource" + (idx + 1), src) + } + def compileString(global: Global)(sourceCode: String): Boolean = { + withRun(global)(_ compileSources newSources(sourceCode)) !global.reporter.hasErrors } + def compilationUnits(global: Global)(sourceCodes: String*): List[global.CompilationUnit] = { + val units = withRun(global) { run => + run compileSources newSources(sourceCodes: _*) + run.units.toList + } + if (global.reporter.hasErrors) { + global.reporter.flush() + sys.error("Compilation failure.") + } + units + } + + def withRun[T](global: Global)(f: global.Run => T): T = { + global.reporter.reset() + f(new global.Run) + } + + // compile the code, optionally first adding to the settings + def compile(args: String*) = compileString(newCompiler(args: _*))(code) /** Constructor/main body **/ try show() diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check new file mode 100644 index 0000000000..c8040a4cb1 --- /dev/null +++ b/test/files/run/existentials-in-compiler.check @@ -0,0 +1,156 @@ +abstract trait Bippy[A <: AnyRef,B] extends Object + extest.Bippy[_ <: AnyRef, _] + +abstract trait BippyBud[A <: AnyRef,B,C <: List[A]] extends Object + extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] } + +abstract trait BippyLike[A <: AnyRef,B <: List[A],This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object + extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] } + +abstract trait Contra[-A >: AnyRef,-B] extends Object + extest.Contra[_ >: AnyRef, _] + +abstract trait ContraLike[-A >: AnyRef,-B >: List[A]] extends Object + extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] } + +abstract trait Cov01[+A <: AnyRef,+B] extends Object + extest.Cov01[_ <: AnyRef, _] + +abstract trait Cov02[+A <: AnyRef,B] extends Object + extest.Cov02[_ <: AnyRef, _] + +abstract trait Cov03[+A <: AnyRef,-B] extends Object + extest.Cov03[_ <: AnyRef, _] + +abstract trait 
Cov04[A <: AnyRef,+B] extends Object + extest.Cov04[_ <: AnyRef, _] + +abstract trait Cov05[A <: AnyRef,B] extends Object + extest.Cov05[_ <: AnyRef, _] + +abstract trait Cov06[A <: AnyRef,-B] extends Object + extest.Cov06[_ <: AnyRef, _] + +abstract trait Cov07[-A <: AnyRef,+B] extends Object + extest.Cov07[_ <: AnyRef, _] + +abstract trait Cov08[-A <: AnyRef,B] extends Object + extest.Cov08[_ <: AnyRef, _] + +abstract trait Cov09[-A <: AnyRef,-B] extends Object + extest.Cov09[_ <: AnyRef, _] + +abstract trait Cov11[+A <: AnyRef,+B <: List[_]] extends Object + extest.Cov11[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov12[+A <: AnyRef,B <: List[_]] extends Object + extest.Cov12[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov13[+A <: AnyRef,-B <: List[_]] extends Object + extest.Cov13[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov14[A <: AnyRef,+B <: List[_]] extends Object + extest.Cov14[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov15[A <: AnyRef,B <: List[_]] extends Object + extest.Cov15[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov16[A <: AnyRef,-B <: List[_]] extends Object + extest.Cov16[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov17[-A <: AnyRef,+B <: List[_]] extends Object + extest.Cov17[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov18[-A <: AnyRef,B <: List[_]] extends Object + extest.Cov18[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov19[-A <: AnyRef,-B <: List[_]] extends Object + extest.Cov19[_ <: AnyRef, _ <: List[_]] + +abstract trait Cov21[+A,+B] extends Object + extest.Cov21[_, _] + +abstract trait Cov22[+A,B] extends Object + extest.Cov22[_, _] + +abstract trait Cov23[+A,-B] extends Object + extest.Cov23[_, _] + +abstract trait Cov24[A,+B] extends Object + extest.Cov24[_, _] + +abstract trait Cov25[A,B] extends Object + extest.Cov25[_, _] + +abstract trait Cov26[A,-B] extends Object + extest.Cov26[_, _] + +abstract trait Cov27[-A,+B] extends Object + extest.Cov27[_, _] + +abstract trait Cov28[-A,B] extends Object + extest.Cov28[_, _] + +abstract trait Cov29[-A,-B] extends Object + extest.Cov29[_, _] + +abstract trait Cov31[+A,+B,C <: (A, B)] extends Object + extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) } + +abstract trait Cov32[+A,B,C <: (A, B)] extends Object + extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) } + +abstract trait Cov33[+A,-B,C <: (A, _$10) forSome { type _$10 }] extends Object + extest.Cov33[A,B,C] forSome { +A; -B; C <: (A, _$10) forSome { type _$10 } } + +abstract trait Cov34[A,+B,C <: (A, B)] extends Object + extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) } + +abstract trait Cov35[A,B,C <: (A, B)] extends Object + extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) } + +abstract trait Cov36[A,-B,C <: (A, _$11) forSome { type _$11 }] extends Object + extest.Cov36[A,B,C] forSome { A; -B; C <: (A, _$11) forSome { type _$11 } } + +abstract trait Cov37[-A,+B,C <: (_$12, B) forSome { type _$12 }] extends Object + extest.Cov37[A,B,C] forSome { -A; +B; C <: (_$12, B) forSome { type _$12 } } + +abstract trait Cov38[-A,B,C <: (_$13, B) forSome { type _$13 }] extends Object + extest.Cov38[A,B,C] forSome { -A; B; C <: (_$13, B) forSome { type _$13 } } + +abstract trait Cov39[-A,-B,C <: Tuple2[_, _]] extends Object + extest.Cov39[_, _, _ <: Tuple2[_, _]] + +abstract trait Cov41[+A >: Null,+B] extends Object + extest.Cov41[_ >: Null, _] + +abstract trait Cov42[+A >: Null,B] extends Object + extest.Cov42[_ >: Null, _] + +abstract trait Cov43[+A >: Null,-B] extends Object + extest.Cov43[_ >: Null, _] + +abstract trait Cov44[A >: Null,+B] extends 
Object + extest.Cov44[_ >: Null, _] + +abstract trait Cov45[A >: Null,B] extends Object + extest.Cov45[_ >: Null, _] + +abstract trait Cov46[A >: Null,-B] extends Object + extest.Cov46[_ >: Null, _] + +abstract trait Cov47[-A >: Null,+B] extends Object + extest.Cov47[_ >: Null, _] + +abstract trait Cov48[-A >: Null,B] extends Object + extest.Cov48[_ >: Null, _] + +abstract trait Cov49[-A >: Null,-B] extends Object + extest.Cov49[_ >: Null, _] + +abstract trait Covariant[+A <: AnyRef,+B] extends Object + extest.Covariant[_ <: AnyRef, _] + +abstract trait CovariantLike[+A <: AnyRef,+B <: List[A],+This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object + extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] } + diff --git a/test/files/run/existentials-in-compiler.scala b/test/files/run/existentials-in-compiler.scala new file mode 100644 index 0000000000..e4f6920145 --- /dev/null +++ b/test/files/run/existentials-in-compiler.scala @@ -0,0 +1,83 @@ +import scala.tools.nsc._ +import scala.tools.partest.CompilerTest +import scala.collection.{ mutable, immutable, generic } + +object Test extends CompilerTest { + import global._ + import definitions._ + + def code = """ +package extest { + trait Bippy[A <: AnyRef, B] { } // wildcards + trait BippyLike[A <: AnyRef, B <: List[A], This <: BippyLike[A, B, This] with Bippy[A, B]] // no wildcards + trait BippyBud[A <: AnyRef, B, C <: List[A]] + + trait Cov01[+A <: AnyRef, +B] { } + trait Cov02[+A <: AnyRef, B] { } + trait Cov03[+A <: AnyRef, -B] { } + trait Cov04[ A <: AnyRef, +B] { } + trait Cov05[ A <: AnyRef, B] { } + trait Cov06[ A <: AnyRef, -B] { } + trait Cov07[-A <: AnyRef, +B] { } + trait Cov08[-A <: AnyRef, B] { } + trait Cov09[-A <: AnyRef, -B] { } + + trait Cov11[+A <: AnyRef, +B <: List[_]] { } + trait Cov12[+A <: AnyRef, B <: List[_]] { } + trait Cov13[+A <: AnyRef, -B <: List[_]] { } + trait Cov14[ A <: AnyRef, +B <: List[_]] { } + trait Cov15[ A <: AnyRef, B <: List[_]] { } + trait Cov16[ A <: AnyRef, -B <: List[_]] { } + trait Cov17[-A <: AnyRef, +B <: List[_]] { } + trait Cov18[-A <: AnyRef, B <: List[_]] { } + trait Cov19[-A <: AnyRef, -B <: List[_]] { } + + trait Cov21[+A, +B] { } + trait Cov22[+A, B] { } + trait Cov23[+A, -B] { } + trait Cov24[ A, +B] { } + trait Cov25[ A, B] { } + trait Cov26[ A, -B] { } + trait Cov27[-A, +B] { } + trait Cov28[-A, B] { } + trait Cov29[-A, -B] { } + + trait Cov31[+A, +B, C <: ((A, B))] { } + trait Cov32[+A, B, C <: ((A, B))] { } + trait Cov33[+A, -B, C <: ((A, _))] { } + trait Cov34[ A, +B, C <: ((A, B))] { } + trait Cov35[ A, B, C <: ((A, B))] { } + trait Cov36[ A, -B, C <: ((A, _))] { } + trait Cov37[-A, +B, C <: ((_, B))] { } + trait Cov38[-A, B, C <: ((_, B))] { } + trait Cov39[-A, -B, C <: ((_, _))] { } + + trait Cov41[+A >: Null, +B] { } + trait Cov42[+A >: Null, B] { } + trait Cov43[+A >: Null, -B] { } + trait Cov44[ A >: Null, +B] { } + trait Cov45[ A >: Null, B] { } + trait Cov46[ A >: Null, -B] { } + trait Cov47[-A >: Null, +B] { } + trait Cov48[-A >: Null, B] { } + trait Cov49[-A >: Null, -B] { } + + trait Covariant[+A <: AnyRef, +B] { } + trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: CovariantLike[A, B, This] with Covariant[A, B]] + + trait Contra[-A >: AnyRef, -B] { } + trait ContraLike[-A >: AnyRef, -B >: List[A]] +} + """ + + def check(source: String, unit: global.CompilationUnit) = { + 
getRequiredModule("extest").moduleClass.info.decls.toList.filter(_.isType).map(_.initialize).sortBy(_.name.toString) foreach { clazz => + afterTyper { + clazz.info + println(clazz.defString) + println(" " + classExistentialType(clazz) + "\n") + } + } + true + } +} -- cgit v1.2.3 From 8b06f1b27c54e9d02739d7062aad2711cd758188 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 16:23:05 -0800 Subject: Modest deverbosification of recent commit. Review by @scalamacros. --- .../scala/reflect/runtime/TreeBuildUtil.scala | 23 ++++++---------------- 1 file changed, 6 insertions(+), 17 deletions(-) diff --git a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala index 0b54843344..61001a4778 100644 --- a/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala +++ b/src/compiler/scala/reflect/runtime/TreeBuildUtil.scala @@ -2,23 +2,12 @@ package scala.reflect package runtime trait TreeBuildUtil extends Universe with api.TreeBuildUtil { - def staticClass(fullname: String): Symbol = { - val sym = definitions.getRequiredClass(fullname) - sym.initialize - sym - } - - def staticModule(fullname: String): Symbol = { - val sym = definitions.getRequiredModule(fullname) - sym.initialize - sym - } - - def thisModuleType(fullname: String) = { - val sym = staticModule(fullname).moduleClass - sym.initialize - sym.thisType - } + /** A comment to the effect of why initialize was added to all these + * would be appreciated. (We may as well start somewhere.) + */ + def staticClass(fullname: String) = definitions.getRequiredClass(fullname).initialize + def staticModule(fullname: String) = definitions.getRequiredModule(fullname).initialize + def thisModuleType(fullname: String) = staticModule(fullname).moduleClass.initialize.thisType /** Selects type symbol with given name from the defined members of prefix type */ -- cgit v1.2.3 From 57758b518b2409ea0cc1aa0be7853025aa053ab8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 21:05:16 -0800 Subject: Man, these one argument asserts. Some day, outlawed! --- src/compiler/scala/reflect/internal/Scopes.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/compiler/scala/reflect/internal/Scopes.scala b/src/compiler/scala/reflect/internal/Scopes.scala index 54d3de09cd..37464ebf29 100644 --- a/src/compiler/scala/reflect/internal/Scopes.scala +++ b/src/compiler/scala/reflect/internal/Scopes.scala @@ -120,7 +120,7 @@ trait Scopes extends api.Scopes { self: SymbolTable => * @param sym ... */ def enterUnique(sym: Symbol) { - assert(lookup(sym.name) == NoSymbol) + assert(lookup(sym.name) == NoSymbol, (sym.fullLocationString, lookup(sym.name).fullLocationString)) enter(sym) } -- cgit v1.2.3 From eb8556ca663de9bf77514eab6e63f0a2f7599413 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 21:55:15 -0800 Subject: Fix for SI-5444. Fix for trait/impl renaming in 5cbd7d06eb was incomplete. Looks more complete now. 
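Before the diff, a minimal standalone sketch of the pairing scheme this LambdaLift change relies on (illustrative only: Sym is a stand-in for the compiler's Symbol, and the two maps mirror localTraits and localImplClasses from the hunk further down). A local trait is recorded under (trait, name) -> owner and its implementation class under (owner, interface name) -> impl class, so when a trait Foo is renamed to Foo$1 the matching Foo$class can be located via the shared owner and renamed to Foo$1$class in the same step.

object RenamePairingSketch {
  import scala.collection.mutable

  // Illustrative stand-in for the compiler's Symbol.
  final case class Sym(name: String, owner: String)

  // (trait, name) -> owner, filled in when a renamable local trait is seen.
  val localTraits      = mutable.HashMap[(Sym, String), String]()
  // (owner, interface name) -> impl class, filled in when the impl class is seen.
  val localImplClasses = mutable.HashMap[(String, String), Sym]()

  def renameSym(sym: Sym): Unit            = println("rename " + sym.name)
  def renameTrait(t: Sym, impl: Sym): Unit = println("rename " + t.name + " and " + impl.name + " in step")

  // Mirrors the pattern match in the patch: a trait and its impl class are paired
  // through the shared owner and renamed together; anything else is renamed alone.
  def rename(sym: Sym): Unit =
    localTraits.remove((sym, sym.name)) match {
      case None        => renameSym(sym)
      case Some(owner) =>
        localImplClasses.remove((owner, sym.name)) match {
          case Some(impl) => renameTrait(sym, impl)
          case None       => renameSym(sym)        // pure interface, no impl class
        }
    }
}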
--- src/compiler/scala/reflect/internal/Symbols.scala | 9 +++-- .../scala/tools/nsc/transform/AddInterfaces.scala | 5 ++- .../scala/tools/nsc/transform/LambdaLift.scala | 31 +++++++++++----- .../scala/tools/nsc/typechecker/Namers.scala | 2 +- test/files/pos/t5444.scala | 42 ++++++++++++++++++++++ 5 files changed, 74 insertions(+), 15 deletions(-) create mode 100644 test/files/pos/t5444.scala diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 69d881e8e1..77ed2f6a1b 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1704,6 +1704,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => * (which is always the interface, by convention) * - before erasure, it looks up the interface name in the scope of the owner of the class. * This only works for implementation classes owned by other classes or traits. + * !!! Why? */ final def toInterface: Symbol = if (isImplClass) { @@ -2080,6 +2081,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => def infosString = infos.toString() + def debugLocationString = fullLocationString + " " + debugFlagString + def debugFlagString = hasFlagsToString(-1L) def hasFlagsToString(mask: Long): String = flagsToString( flags & mask, if (hasAccessBoundary) privateWithin.toString else "" @@ -2178,7 +2181,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => } def setLazyAccessor(sym: Symbol): TermSymbol = { - assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, hasFlagsToString(-1L), referenced, sym)) + assert(isLazy && (referenced == NoSymbol || referenced == sym), (this, debugFlagString, referenced, sym)) referenced = sym this } @@ -2319,7 +2322,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => /** Overridden in subclasses for which it makes sense. 
*/ - def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+this.fullLocationString+ " " + hasFlagsToString(-1L)) + def existentialBound: Type = abort("unexpected type: "+this.getClass+ " "+debugLocationString) override def name: TypeName = super.name.asInstanceOf[TypeName] final override def isType = true @@ -2327,7 +2330,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def isAbstractType = { if (settings.debug.value) { if (isDeferred) { - println("TypeSymbol claims to be abstract type: " + this.getClass + " " + hasFlagsToString(-1L) + " at ") + println("TypeSymbol claims to be abstract type: " + this.getClass + " " + debugFlagString + " at ") (new Throwable).printStackTrace } } diff --git a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala index e01bbccf13..b4ec8a23ce 100644 --- a/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala +++ b/src/compiler/scala/tools/nsc/transform/AddInterfaces.scala @@ -94,7 +94,6 @@ abstract class AddInterfaces extends InfoTransform { // error: java.lang.AssertionError: assertion failed: (scala.tools.nsc.typechecker.Contexts$NoContext$,scala.tools.nsc.typechecker.Contexts,NoContext$,trait Contexts in package typechecker) / while parsing (/scala/trunk/build/pack/lib/scala-compiler.jar(scala/tools/nsc/interactive/ContextTrees$class.class),Some(class ContextTrees$class))trait Contexts.NoContext$ linkedModule: List() val originalImpl = impl - val originalImplString = originalImpl.hasFlagsToString(-1L) if (impl != NoSymbol) { // Unlink a pre-existing symbol only if the implementation class is // visible on the compilation classpath. In general this is true under @@ -120,8 +119,8 @@ abstract class AddInterfaces extends InfoTransform { impl setInfo new LazyImplClassType(iface) implClassMap(iface) = impl debuglog( - "generating impl class " + impl + " " + impl.hasFlagsToString(-1L) + " in " + iface.owner + ( - if (originalImpl == NoSymbol) "" else " (cloned from " + originalImpl.fullLocationString + " " + originalImplString + ")" + "generating impl class " + impl.debugLocationString + " in " + iface.owner + ( + if (originalImpl == NoSymbol) "" else " (cloned from " + originalImpl.debugLocationString + ")" ) ) impl diff --git a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala index 712298bd89..4fc7b9f92f 100644 --- a/src/compiler/scala/tools/nsc/transform/LambdaLift.scala +++ b/src/compiler/scala/tools/nsc/transform/LambdaLift.scala @@ -65,7 +65,10 @@ abstract class LambdaLift extends InfoTransform { /** The set of symbols that need to be renamed. */ private val renamable = newSymSet - private val renamableImplClasses = mutable.HashMap[Name, Symbol]() withDefaultValue NoSymbol + // (trait, name) -> owner + private val localTraits = mutable.HashMap[(Symbol, Name), Symbol]() + // (owner, name) -> implClass + private val localImplClasses = mutable.HashMap[(Symbol, Name), Symbol]() /** A flag to indicate whether new free variables have been found */ private var changedFreeVars: Boolean = _ @@ -167,8 +170,13 @@ abstract class LambdaLift extends InfoTransform { // arrangements, and then have separate methods which attempt to compensate // for that failure. There should be exactly one method for any given // entity which always gives the right answer. 
- if (sym.isImplClass) renamableImplClasses(nme.interfaceName(sym.name)) = sym - else renamable addEntry sym + if (sym.isImplClass) + localImplClasses((sym.owner, nme.interfaceName(sym.name))) = sym + else { + renamable addEntry sym + if (sym.isTrait) + localTraits((sym, sym.name)) = sym.owner + } } case DefDef(_, _, _, _, _, _) => if (sym.isLocal) { @@ -237,14 +245,21 @@ abstract class LambdaLift extends InfoTransform { debuglog("renaming impl class in step with %s: %s => %s".format(traitSym, originalImplName, implSym.name)) } - + for (sym <- renamable) { // If we renamed a trait from Foo to Foo$1, we must rename the implementation // class from Foo$class to Foo$1$class. (Without special consideration it would - // become Foo$class$1 instead.) - val implClass = if (sym.isTrait) renamableImplClasses(sym.name) else NoSymbol - if ((implClass ne NoSymbol) && (sym.owner == implClass.owner)) renameTrait(sym, implClass) - else renameSym(sym) + // become Foo$class$1 instead.) Since the symbols are being renamed out from + // under us, and there's no reliable link between trait symbol and impl symbol, + // we have maps from ((trait, name)) -> owner and ((owner, name)) -> impl. + localTraits remove ((sym, sym.name)) match { + case None => renameSym(sym) + case Some(owner) => + localImplClasses remove ((owner, sym.name)) match { + case Some(implSym) => renameTrait(sym, implSym) + case _ => renameSym(sym) // pure interface, no impl class + } + } } atPhase(phase.next) { diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala index 0ff2b418f4..51542ec757 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala @@ -282,7 +282,7 @@ trait Namers extends MethodSynthesis { } private def logAssignSymbol(tree: Tree, sym: Symbol): Symbol = { - log("[+symbol] " + sym.hasFlagsToString(-1L) + " " + sym) + log("[+symbol] " + sym.debugLocationString) tree.symbol = sym sym } diff --git a/test/files/pos/t5444.scala b/test/files/pos/t5444.scala new file mode 100644 index 0000000000..df6b2ce4f8 --- /dev/null +++ b/test/files/pos/t5444.scala @@ -0,0 +1,42 @@ +// /scala/trac/5444/a.scala +// Mon Feb 13 21:01:45 PST 2012 + +// Traits require identical names to reproduce. +class Test { + def a() = { + trait T { + def x() = 1 + } + trait U { + def x1() = 2 + } + class Bippy extends T with U { def z() = x() + x1() } + new Bippy + } + def b() { + trait T { + def y() = 3 + trait T2 { + def yy() = 10 + } + } + trait U { + def y1() = 4 + trait T3 { + def yy() = 11 + } + } + class Bippy extends T with U { def z() = y() + y1() + (1 to (new T2 { }).yy()).map(_ + 1).sum } + (new Bippy).z() + } + def c() { + trait T { + def z() = 5 + } + trait U { + def z1() = 6 + } + (new Test with T with U).z1() + } +} + -- cgit v1.2.3 From 2b731911e97a281e324060099631e2374b2144ec Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 23:09:39 -0800 Subject: Fix for SI-5452. Why is calling the error function not enough to register the error, why does "setError(tree)" have to be called as well? That was the cause of this particular stackoverflow. In ContextErrors I see lots of methods which call setError and lots more which do not, and frankly it's all pretty terrifying. There is zero documentation attached to setError. Maybe there's an explanation somewhere I'm not seeing. Review by @hubertp. 
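As a rough answer to the question posed above, here is a simplified model (these are not the compiler's actual definitions; Node, report and markErroneous are invented names standing in for Tree, issueNormalTypeError and setError). Issuing an error only records a diagnostic; setError additionally marks the tree itself as erroneous, which is what downstream logic consults before retrying, and without that mark the same tree kept being re-typed until the stack overflowed.

object SetErrorSketch {
  // Toy model only, not compiler code.
  sealed trait Tpe
  case object Untyped  extends Tpe
  case object ErrorTpe extends Tpe

  final class Node { var tpe: Tpe = Untyped }

  val diagnostics = scala.collection.mutable.ListBuffer[String]()

  def report(msg: String): Unit = { diagnostics += msg }     // records a message only
  def markErroneous(n: Node): Node = { n.tpe = ErrorTpe; n } // also poisons the tree itself

  // What retry-style logic can consult so it does not type the same node again.
  def alreadyFailed(n: Node): Boolean = n.tpe == ErrorTpe
}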
--- .../tools/nsc/typechecker/ContextErrors.scala | 4 ++- test/files/neg/t5452.check | 6 +++++ test/files/neg/t5452.scala | 29 ++++++++++++++++++++++ 3 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 test/files/neg/t5452.check create mode 100644 test/files/neg/t5452.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 6ee09d064f..466b5125a8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -714,9 +714,11 @@ trait ContextErrors { setError(tree) } - def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = { issueNormalTypeError(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) + setError(tree) + } def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, argtpes: List[Type], pt: Type) = { diff --git a/test/files/neg/t5452.check b/test/files/neg/t5452.check new file mode 100644 index 0000000000..baf544499b --- /dev/null +++ b/test/files/neg/t5452.check @@ -0,0 +1,6 @@ +t5452.scala:28: error: overloaded method value apply with alternatives: + + cannot be applied to (Queryable[CoffeesTable]) + Queryable[CoffeesTable]( q.treeFilter(null) ) + ^ +one error found diff --git a/test/files/neg/t5452.scala b/test/files/neg/t5452.scala new file mode 100644 index 0000000000..1032db7a4b --- /dev/null +++ b/test/files/neg/t5452.scala @@ -0,0 +1,29 @@ +// /scala/trac/5452/a.scala +// Mon Feb 13 22:52:36 PST 2012 + +// import scala.reflect.mirror._ + +trait Tree + +object Bip { + def ??? = sys.error("") +} +import Bip._ + +case class Queryable[T]() { + def treeFilter( t:Tree ) : Queryable[T] = ??? +} + +object Queryable { + def apply[T:Manifest] = ??? + def apply[T:Manifest]( t:Tree ) = ??? +} + +trait CoffeesTable{ + def sales : Int +} + +object Test extends App{ + val q = new Queryable[CoffeesTable] + Queryable[CoffeesTable]( q.treeFilter(null) ) +} -- cgit v1.2.3 From bd988df6448bfeacf8fa6bf35b21881d3073a547 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 13 Feb 2012 23:29:06 -0800 Subject: Fix for set/list/oops bug in MutableSettings#copy. Closes SI-5446. I am morally certain that fixes of this nature could be performed by someone who has logged fewer than ten thousand hours with the compiler. --- src/compiler/scala/tools/nsc/settings/MutableSettings.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala index b468e7c0af..f99d1399c0 100644 --- a/src/compiler/scala/tools/nsc/settings/MutableSettings.scala +++ b/src/compiler/scala/tools/nsc/settings/MutableSettings.scala @@ -95,8 +95,7 @@ class MutableSettings(val errorFn: String => Unit) */ def copy(): Settings = { val s = new Settings() - val xs = userSetSettings flatMap (_.unparse) - s.processArguments(xs.toList, true) + s.processArguments(recreateArgs, true) s } -- cgit v1.2.3 From a725bf982c06e16c5d533ea6b2227b726db4f7e4 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 14 Feb 2012 06:16:09 -0800 Subject: Make fix for SI-5452 not break other things. If this looks hacky, that's because it is. 
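A hedged schematic of the control flow used by the Infer.scala hunk below (helper names such as resolve, reportFailure and markError are invented for the sketch; only the secondTry flag and its flip inside tryTwice mirror the real code). tryTwice may run its body twice; the flag starts out true, flips to false on the first pass and back to true on the second, so the tree is only marked erroneous when the second attempt also finds no best alternative.

object SecondTrySketch {
  // Schematic only: this tryTwice just re-runs the body once if it reports failure;
  // the compiler's version catches type errors and retries under different settings.
  def tryTwice(body: => Boolean): Unit = { if (!body) body; () }

  def inferSchematic(resolve: () => Option[String],
                     reportFailure: () => Unit,
                     markError: () => Unit): Unit = {
    var secondTry = true
    tryTwice {
      secondTry = !secondTry          // false on the first pass, true on the second
      resolve() match {
        case Some(best) =>
          true                        // success: commit to `best` (elided here)
        case None =>
          reportFailure()
          if (secondTry) markError()  // only give up for good on the second pass
          false
      }
    }
  }
}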
--- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 4 +--- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 7 ++++++- test/files/neg/t5452.check | 4 +++- 3 files changed, 10 insertions(+), 5 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 466b5125a8..6ee09d064f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -714,11 +714,9 @@ trait ContextErrors { setError(tree) } - def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = { + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = issueNormalTypeError(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) - setError(tree) - } def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, argtpes: List[Type], pt: Type) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index b97fbebec2..acf905d974 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1466,7 +1466,9 @@ trait Infer { argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match { case OverloadedType(pre, alts) => val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 + var secondTry = true tryTwice { + secondTry = !secondTry debuglog("infer method alt "+ tree.symbol +" with alternatives "+ (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt) @@ -1488,8 +1490,11 @@ trait Infer { if (improves(alt, best)) alt else best) val competing = applicable.dropWhile(alt => best == alt || improves(best, alt)) if (best == NoSymbol) { - if (pt == WildcardType) + if (pt == WildcardType) { NoBestMethodAlternativeError(tree, argtpes, pt) + if (secondTry) + setError(tree) + } else inferMethodAlternative(tree, undetparams, argtpes, WildcardType) } else if (!competing.isEmpty) { diff --git a/test/files/neg/t5452.check b/test/files/neg/t5452.check index baf544499b..2f35a45509 100644 --- a/test/files/neg/t5452.check +++ b/test/files/neg/t5452.check @@ -1,5 +1,7 @@ t5452.scala:28: error: overloaded method value apply with alternatives: - + ()Queryable[CoffeesTable] + (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing + (implicit evidence$1: Manifest[CoffeesTable])Nothing cannot be applied to (Queryable[CoffeesTable]) Queryable[CoffeesTable]( q.treeFilter(null) ) ^ -- cgit v1.2.3 From afa5078d55153a26fb9d0af0ad5bd03bcda93fc0 Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Tue, 14 Feb 2012 09:22:17 -0500 Subject: changed repo id to denote new credentials for new serer. --- src/build/maven/maven-deploy.xml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml index 510ddca393..679f45ab54 100644 --- a/src/build/maven/maven-deploy.xml +++ b/src/build/maven/maven-deploy.xml @@ -14,7 +14,7 @@ - + Using server[${repository.credentials.id}] for maven repository credentials. 
-- cgit v1.2.3 From 81805c7b28efbfd9132cac41840a5231763d370e Mon Sep 17 00:00:00 2001 From: Josh Suereth Date: Mon, 13 Feb 2012 09:22:00 -0500 Subject: Modified repository deployments to point to OSSRH --- src/build/maven/maven-deploy.xml | 7 ++----- 1 file changed, 2 insertions(+), 5 deletions(-) diff --git a/src/build/maven/maven-deploy.xml b/src/build/maven/maven-deploy.xml index 9fddca3c73..510ddca393 100644 --- a/src/build/maven/maven-deploy.xml +++ b/src/build/maven/maven-deploy.xml @@ -9,11 +9,8 @@ - - - - + + -- cgit v1.2.3 From f6174691647bb1fae7e86d53790b1e540b890755 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 14 Feb 2012 00:05:18 +0100 Subject: Preprocessing for reifees --- src/compiler/scala/tools/nsc/ast/Reifiers.scala | 149 +++++++++++++++++++----- test/files/run/t5271_1.check | 0 test/files/run/t5271_1.scala | 14 +++ test/files/run/t5271_2.check | 1 + test/files/run/t5271_2.scala | 16 +++ test/files/run/t5271_3.check | 1 + test/files/run/t5271_3.scala | 17 +++ test/files/run/t5271_4.check | 0 test/files/run/t5271_4.scala | 14 +++ test/files/run/t5273_1.check | 1 + test/files/run/t5273_1.scala | 17 +++ test/files/run/t5273_2a.check | 1 + test/files/run/t5273_2a.scala | 15 +++ test/files/run/t5273_2b.check | 1 + test/files/run/t5273_2b.scala | 16 +++ test/files/run/t5276_1a.check | 1 + test/files/run/t5276_1a.scala | 15 +++ test/files/run/t5276_1b.check | 1 + test/files/run/t5276_1b.scala | 15 +++ test/files/run/t5276_2a.check | 1 + test/files/run/t5276_2a.scala | 18 +++ test/files/run/t5276_2b.check | 1 + test/files/run/t5276_2b.scala | 19 +++ test/pending/run/t5271_1.check | 0 test/pending/run/t5271_1.scala | 14 --- test/pending/run/t5271_2.check | 1 - test/pending/run/t5271_2.scala | 16 --- test/pending/run/t5273_1.check | 1 - test/pending/run/t5273_1.scala | 16 --- test/pending/run/t5273_2.check | 1 - test/pending/run/t5273_2.scala | 17 --- test/pending/run/t5276.check | 1 - test/pending/run/t5276.scala | 15 --- 33 files changed, 303 insertions(+), 113 deletions(-) create mode 100644 test/files/run/t5271_1.check create mode 100644 test/files/run/t5271_1.scala create mode 100644 test/files/run/t5271_2.check create mode 100644 test/files/run/t5271_2.scala create mode 100644 test/files/run/t5271_3.check create mode 100644 test/files/run/t5271_3.scala create mode 100644 test/files/run/t5271_4.check create mode 100644 test/files/run/t5271_4.scala create mode 100644 test/files/run/t5273_1.check create mode 100644 test/files/run/t5273_1.scala create mode 100644 test/files/run/t5273_2a.check create mode 100644 test/files/run/t5273_2a.scala create mode 100644 test/files/run/t5273_2b.check create mode 100644 test/files/run/t5273_2b.scala create mode 100644 test/files/run/t5276_1a.check create mode 100644 test/files/run/t5276_1a.scala create mode 100644 test/files/run/t5276_1b.check create mode 100644 test/files/run/t5276_1b.scala create mode 100644 test/files/run/t5276_2a.check create mode 100644 test/files/run/t5276_2a.scala create mode 100644 test/files/run/t5276_2b.check create mode 100644 test/files/run/t5276_2b.scala delete mode 100644 test/pending/run/t5271_1.check delete mode 100644 test/pending/run/t5271_1.scala delete mode 100644 test/pending/run/t5271_2.check delete mode 100644 test/pending/run/t5271_2.scala delete mode 100644 test/pending/run/t5273_1.check delete mode 100644 test/pending/run/t5273_1.scala delete mode 100644 test/pending/run/t5273_2.check delete mode 100644 test/pending/run/t5273_2.scala delete mode 100644 test/pending/run/t5276.check delete 
mode 100644 test/pending/run/t5276.scala diff --git a/src/compiler/scala/tools/nsc/ast/Reifiers.scala b/src/compiler/scala/tools/nsc/ast/Reifiers.scala index 91d5d2bf4a..21e075950f 100644 --- a/src/compiler/scala/tools/nsc/ast/Reifiers.scala +++ b/src/compiler/scala/tools/nsc/ast/Reifiers.scala @@ -8,6 +8,7 @@ package ast import symtab._ import Flags._ +import scala.reflect.api.Modifier._ import scala.collection.{ mutable, immutable } import scala.collection.mutable.ListBuffer import scala.tools.nsc.util.FreshNameCreator @@ -289,10 +290,102 @@ trait Reifiers { self: Global => var reifySymbols = false var reifyTypes = false + /** Preprocess a tree before reification */ + private def trimTree(tree: Tree): Tree = { + def trimSyntheticCaseClassMembers(deff: Tree, stats: List[Tree]) = { + var stats1 = stats filterNot (stat => stat.isDef && { + if (stat.symbol.isCaseAccessorMethod && reifyDebug) println("discarding case accessor method: " + stat) + stat.symbol.isCaseAccessorMethod + }) + stats1 = stats1 filterNot (memberDef => memberDef.isDef && { + val isSynthetic = memberDef.symbol.isSynthetic + // @xeno.by: this doesn't work for local classes, e.g. for ones that are top-level to a quasiquote (see comments to companionClass) + // that's why I replace the check with an assumption that all synthetic members are, in fact, generated of case classes +// val isCaseMember = deff.symbol.isCaseClass || deff.symbol.companionClass.isCaseClass + val isCaseMember = true + if (isSynthetic && isCaseMember && reifyDebug) println("discarding case class synthetic def: " + memberDef) + isSynthetic && isCaseMember + }) + stats1 = stats1 map { + case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isCaseAccessor => + if (reifyDebug) println("resetting visibility of case accessor field: " + valdef) + val Modifiers(flags, privateWithin, annotations) = mods + val flags1 = flags & ~Flags.LOCAL & ~Flags.PRIVATE + val mods1 = Modifiers(flags1, privateWithin, annotations) + ValDef(mods1, name, tpt, rhs).copyAttrs(valdef) + case stat => + stat + } + stats1 + } + + def trimSyntheticCaseClassCompanions(stats: List[Tree]) = + stats diff (stats collect { case moddef: ModuleDef => moddef } filter (moddef => { + val isSynthetic = moddef.symbol.isSynthetic + // @xeno.by: this doesn't work for local classes, e.g. 
for ones that are top-level to a quasiquote (see comments to companionClass) + // that's why I replace the check with an assumption that all synthetic modules are, in fact, companions of case classes +// val isCaseCompanion = moddef.symbol.companionClass.isCaseClass + val isCaseCompanion = true + // @xeno.by: we also have to do this ugly hack for the very same reason described above + // normally this sort of stuff is performed in reifyTree, which binds related symbols, however, local companions will be out of its reach + if (reifyDebug) println("boundSym: "+ moddef.symbol) + boundSyms += moddef.symbol + if (isSynthetic && isCaseCompanion && reifyDebug) println("discarding synthetic case class companion: " + moddef) + isSynthetic && isCaseCompanion + })) + + tree match { + case tree if tree.isErroneous => + tree + case ta @ TypeApply(hk, ts) => + def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null + val discard = ts collect { case tt: TypeTree => tt } exists isErased + if (reifyDebug && discard) println("discarding TypeApply: " + tree) + if (discard) hk else ta + case classDef @ ClassDef(mods, name, params, impl) => + val Template(parents, self, body) = impl + val body1 = trimSyntheticCaseClassMembers(classDef, body) + var impl1 = Template(parents, self, body1).copyAttrs(impl) + ClassDef(mods, name, params, impl1).copyAttrs(classDef) + case moduledef @ ModuleDef(mods, name, impl) => + val Template(parents, self, body) = impl + val body1 = trimSyntheticCaseClassMembers(moduledef, body) + var impl1 = Template(parents, self, body1).copyAttrs(impl) + ModuleDef(mods, name, impl1).copyAttrs(moduledef) + case template @ Template(parents, self, body) => + val body1 = trimSyntheticCaseClassCompanions(body) + Template(parents, self, body1).copyAttrs(template) + case block @ Block(stats, expr) => + val stats1 = trimSyntheticCaseClassCompanions(stats) + Block(stats1, expr).copyAttrs(block) + case valdef @ ValDef(mods, name, tpt, rhs) if valdef.symbol.isLazy => + if (reifyDebug) println("dropping $lzy in lazy val's name: " + tree) + val name1 = if (name endsWith nme.LAZY_LOCAL) name dropRight nme.LAZY_LOCAL.length else name + ValDef(mods, name1, tpt, rhs).copyAttrs(valdef) + case unapply @ UnApply(fun, args) => + def extractExtractor(tree: Tree): Tree = { + val Apply(fun, args) = tree + args match { + case List(Ident(special)) if special == nme.SELECTOR_DUMMY => + val Select(extractor, flavor) = fun + assert(flavor == nme.unapply || flavor == nme.unapplySeq) + extractor + case _ => + extractExtractor(fun) + } + } + + if (reifyDebug) println("unapplying unapply: " + tree) + val fun1 = extractExtractor(fun) + Apply(fun1, args).copyAttrs(unapply) + case _ => + tree + } + } + /** Reify a tree */ - private def reifyTree(tree: Tree): Tree = { - def reifyDefault(tree: Tree) = - reifyProduct(tree) + private def reifyTree(tree0: Tree): Tree = { + val tree = trimTree(tree0) var rtree = tree match { case tree if tree.isErroneous => @@ -311,29 +404,24 @@ trait Reifiers { self: Global => } else reifyFree(tree) case tt: TypeTree if (tt.tpe != null) => reifyTypeTree(tt) - case ta @ TypeApply(hk, ts) => - def isErased(tt: TypeTree) = tt.tpe != null && definedInLiftedCode(tt.tpe) && tt.original == null - val discard = ts collect { case tt: TypeTree => tt } exists isErased - if (reifyDebug && discard) println("discarding TypeApply: " + tree) - if (discard) reifyTree(hk) else reifyDefault(ta) case Literal(constant @ Constant(tpe: Type)) if boundSyms exists (tpe contains _) => 
CannotReifyClassOfBoundType(tree, tpe) case Literal(constant @ Constant(sym: Symbol)) if boundSyms contains sym => CannotReifyClassOfBoundEnum(tree, constant.tpe) case tree if tree.isDef => if (reifyDebug) println("boundSym: %s of type %s".format(tree.symbol, (tree.productIterator.toList collect { case tt: TypeTree => tt } headOption).getOrElse(TypeTree(tree.tpe)))) - // registerReifiableSymbol(tree.symbol) boundSyms += tree.symbol - if (tree.symbol.sourceModule != NoSymbol) { - if (reifyDebug) println("boundSym (sourceModule): " + tree.symbol.sourceModule) - boundSyms += tree.symbol.sourceModule - } - - if (tree.symbol.moduleClass != NoSymbol) { - if (reifyDebug) println("boundSym (moduleClass): " + tree.symbol.moduleClass) - boundSyms += tree.symbol.moduleClass - } + bindRelatedSymbol(tree.symbol.sourceModule, "sourceModule") + bindRelatedSymbol(tree.symbol.moduleClass, "moduleClass") + bindRelatedSymbol(tree.symbol.companionClass, "companionClass") + bindRelatedSymbol(tree.symbol.companionModule, "companionModule") + Some(tree.symbol) collect { case termSymbol: TermSymbol => bindRelatedSymbol(termSymbol.referenced, "referenced") } + def bindRelatedSymbol(related: Symbol, name: String): Unit = + if (related != null && related != NoSymbol) { + if (reifyDebug) println("boundSym (" + name + "): " + related) + boundSyms += related + } val prefix = tree.productPrefix val elements = (tree.productIterator map { @@ -354,7 +442,7 @@ trait Reifiers { self: Global => }).toList reifyProduct(prefix, elements) case _ => - reifyDefault(tree) + reifyProduct(tree) } // usually we don't reify symbols/types, because they can be re-inferred during subsequent reflective compilation @@ -396,10 +484,8 @@ trait Reifiers { self: Global => * * This workaround worked surprisingly well and allowed me to fix several important reification bugs, until the abstraction has leaked. * Suddenly I found out that in certain contexts original trees do not contain symbols, but are just parser trees. - * To the moment I know two such situations: - * 1) Unapplies: https://issues.scala-lang.org/browse/SI-5273?focusedCommentId=56057#comment-56057 - * 2) Annotations: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless - * 3) + * To the moment I know only one such situation: typedAnnotations does not typecheck the annotation in-place, but rather creates new trees and typechecks them, so the original remains symless. + * This is laboriously worked around in the code below. I hope this will be the only workaround in this department. 
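
For context on all the case-class trimming above: a typechecked case class (and its companion) already carries a pile of compiler-generated members, and the typer will synthesize them again when the reified code is recompiled, so trimTree drops them instead of reifying them verbatim. A quick standalone way to see what the typer adds, using plain Java reflection on a throwaway class (nothing here is compiler-internal):

    object CaseClassSynthetics extends App {
      case class C(foo: Int, bar: Int)
      // Everything beyond the constructor parameters is compiler-generated:
      // accessors, copy (and its defaults), productArity/productElement, canEqual,
      // equals/hashCode/toString, plus a synthetic companion with apply/unapply.
      classOf[C].getDeclaredMethods.map(_.getName).distinct.sorted foreach println
    }
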
*/ private def reifyTypeTree(tt: TypeTree): Tree = { if (definedInLiftedCode(tt.tpe)) { @@ -441,14 +527,15 @@ trait Reifiers { self: Global => } } else { var rtt = mirrorCall(nme.TypeTree, reifyType(tt.tpe)) - // @xeno.by: originals get typechecked during subsequent reflective compilation, which leads to subtle bugs - // https://issues.scala-lang.org/browse/SI-5273?focusedCommentId=56057#comment-56057 - // until this is somehow sorted out, I disable reification of originals - // if (tt.original != null) { - // val setOriginal = Select(rtt, newTermName("setOriginal")) - // val reifiedOriginal = reify(tt.original) - // rtt = Apply(setOriginal, List(reifiedOriginal)) - // } + // @xeno.by: temporarily disabling reification of originals + // subsequent reflective compilation will try to typecheck them + // and this means that the reifier has to do additional efforts to ensure that this will succeed + // additional efforts + no clear benefit = will be implemented later +// if (tt.original != null) { +// val setOriginal = Select(rtt, newTermName("setOriginal")) +// val reifiedOriginal = reify(tt.original) +// rtt = Apply(setOriginal, List(reifiedOriginal)) +// } rtt } } diff --git a/test/files/run/t5271_1.check b/test/files/run/t5271_1.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/t5271_1.scala b/test/files/run/t5271_1.scala new file mode 100644 index 0000000000..5f10e64528 --- /dev/null +++ b/test/files/run/t5271_1.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + case class C(foo: Int, bar: Int) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5271_2.check b/test/files/run/t5271_2.check new file mode 100644 index 0000000000..b8626c4cff --- /dev/null +++ b/test/files/run/t5271_2.check @@ -0,0 +1 @@ +4 diff --git a/test/files/run/t5271_2.scala b/test/files/run/t5271_2.scala new file mode 100644 index 0000000000..71967c04ed --- /dev/null +++ b/test/files/run/t5271_2.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + case class C(foo: Int, bar: Int) + val c = C(2, 2) + println(c.foo * c.bar) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check new file mode 100644 index 0000000000..f32a5804e2 --- /dev/null +++ b/test/files/run/t5271_3.check @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/test/files/run/t5271_3.scala b/test/files/run/t5271_3.scala new file mode 100644 index 0000000000..bfa116c691 --- /dev/null +++ b/test/files/run/t5271_3.scala @@ -0,0 +1,17 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C { def qwe = 4 } + case class C(foo: Int, bar: Int) + val c = C(2, 2) + println(c.foo * c.bar == C.qwe) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git 
a/test/files/run/t5271_4.check b/test/files/run/t5271_4.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/files/run/t5271_4.scala b/test/files/run/t5271_4.scala new file mode 100644 index 0000000000..e5e16033e8 --- /dev/null +++ b/test/files/run/t5271_4.scala @@ -0,0 +1,14 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + case object C + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5273_1.check b/test/files/run/t5273_1.check new file mode 100644 index 0000000000..0cfbf08886 --- /dev/null +++ b/test/files/run/t5273_1.check @@ -0,0 +1 @@ +2 diff --git a/test/files/run/t5273_1.scala b/test/files/run/t5273_1.scala new file mode 100644 index 0000000000..1175881c9f --- /dev/null +++ b/test/files/run/t5273_1.scala @@ -0,0 +1,17 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + List(1, 2, 3) match { + case foo :: bar :: _ => println(foo * bar) + case _ => println("this is getting out of hand!") + } + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5273_2a.check b/test/files/run/t5273_2a.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/t5273_2a.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/t5273_2a.scala b/test/files/run/t5273_2a.scala new file mode 100644 index 0000000000..12ddbb280a --- /dev/null +++ b/test/files/run/t5273_2a.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + val foo :: bar :: _ = List(1, 2, 3) + println(foo * bar) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5273_2b.check b/test/files/run/t5273_2b.check new file mode 100644 index 0000000000..c551774ca5 --- /dev/null +++ b/test/files/run/t5273_2b.check @@ -0,0 +1 @@ +name = American Dollar, shortname = USD, value = 2,8567 diff --git a/test/files/run/t5273_2b.scala b/test/files/run/t5273_2b.scala new file mode 100644 index 0000000000..8b75084463 --- /dev/null +++ b/test/files/run/t5273_2b.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + val RegexParser = """(.*) \d+([A-Z]+) \| (.*) \|.*""".r + val RegexParser(name, shortname, value) = "American Dollar 1USD | 2,8567 | sometext" + println("name = %s, shortname = %s, value = %s".format(name, shortname, value)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5276_1a.check b/test/files/run/t5276_1a.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/t5276_1a.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git 
a/test/files/run/t5276_1a.scala b/test/files/run/t5276_1a.scala new file mode 100644 index 0000000000..c8afbba19e --- /dev/null +++ b/test/files/run/t5276_1a.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + lazy val x = 2 + println(x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5276_1b.check b/test/files/run/t5276_1b.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/t5276_1b.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/t5276_1b.scala b/test/files/run/t5276_1b.scala new file mode 100644 index 0000000000..31582201fb --- /dev/null +++ b/test/files/run/t5276_1b.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + implicit lazy val x = 2 + implicitly[Int] + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5276_2a.check b/test/files/run/t5276_2a.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/t5276_2a.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/t5276_2a.scala b/test/files/run/t5276_2a.scala new file mode 100644 index 0000000000..179c14b739 --- /dev/null +++ b/test/files/run/t5276_2a.scala @@ -0,0 +1,18 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { + lazy val x = 2 + } + + println(new C().x) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/files/run/t5276_2b.check b/test/files/run/t5276_2b.check new file mode 100644 index 0000000000..d8263ee986 --- /dev/null +++ b/test/files/run/t5276_2b.check @@ -0,0 +1 @@ +2 \ No newline at end of file diff --git a/test/files/run/t5276_2b.scala b/test/files/run/t5276_2b.scala new file mode 100644 index 0000000000..6fe2873fef --- /dev/null +++ b/test/files/run/t5276_2b.scala @@ -0,0 +1,19 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class C { + implicit lazy val x = 2 + def y = implicitly[Int] + } + + println(new C().y) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + val ttree = toolbox.typeCheck(code.tree) + toolbox.runExpr(ttree) +} diff --git a/test/pending/run/t5271_1.check b/test/pending/run/t5271_1.check deleted file mode 100644 index e69de29bb2..0000000000 diff --git a/test/pending/run/t5271_1.scala b/test/pending/run/t5271_1.scala deleted file mode 100644 index 5f10e64528..0000000000 --- a/test/pending/run/t5271_1.scala +++ /dev/null @@ -1,14 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - case class C(foo: Int, bar: Int) - }; - - 
val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5271_2.check b/test/pending/run/t5271_2.check deleted file mode 100644 index b8626c4cff..0000000000 --- a/test/pending/run/t5271_2.check +++ /dev/null @@ -1 +0,0 @@ -4 diff --git a/test/pending/run/t5271_2.scala b/test/pending/run/t5271_2.scala deleted file mode 100644 index 71967c04ed..0000000000 --- a/test/pending/run/t5271_2.scala +++ /dev/null @@ -1,16 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - case class C(foo: Int, bar: Int) - val c = C(2, 2) - println(c.foo * c.bar) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5273_1.check b/test/pending/run/t5273_1.check deleted file mode 100644 index c551774ca5..0000000000 --- a/test/pending/run/t5273_1.check +++ /dev/null @@ -1 +0,0 @@ -name = American Dollar, shortname = USD, value = 2,8567 diff --git a/test/pending/run/t5273_1.scala b/test/pending/run/t5273_1.scala deleted file mode 100644 index 8b75084463..0000000000 --- a/test/pending/run/t5273_1.scala +++ /dev/null @@ -1,16 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - val RegexParser = """(.*) \d+([A-Z]+) \| (.*) \|.*""".r - val RegexParser(name, shortname, value) = "American Dollar 1USD | 2,8567 | sometext" - println("name = %s, shortname = %s, value = %s".format(name, shortname, value)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5273_2.check b/test/pending/run/t5273_2.check deleted file mode 100644 index 0cfbf08886..0000000000 --- a/test/pending/run/t5273_2.check +++ /dev/null @@ -1 +0,0 @@ -2 diff --git a/test/pending/run/t5273_2.scala b/test/pending/run/t5273_2.scala deleted file mode 100644 index 1175881c9f..0000000000 --- a/test/pending/run/t5273_2.scala +++ /dev/null @@ -1,17 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - List(1, 2, 3) match { - case foo :: bar :: _ => println(foo * bar) - case _ => println("this is getting out of hand!") - } - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5276.check b/test/pending/run/t5276.check deleted file mode 100644 index 0cfbf08886..0000000000 --- a/test/pending/run/t5276.check +++ /dev/null @@ -1 +0,0 @@ -2 diff --git a/test/pending/run/t5276.scala b/test/pending/run/t5276.scala deleted file mode 100644 index 432fdb91e4..0000000000 --- a/test/pending/run/t5276.scala +++ /dev/null @@ -1,15 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - lazy x = 2 - println(x) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val 
ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} -- cgit v1.2.3 From 94166682eed2a49038d9bdc1515e0b2f0630ab20 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 14 Feb 2012 08:32:34 -0800 Subject: Made speclib less of a maintenance headache. Now it copies in the current versions of BoxesRunTime and ScalaRunTime and applies patches to them, and the whole build is automated. # This is the only thing I actually typed, the rest is fancy echo. $ test/instrumented/mkinstrumented.sh build % rm -rf /scratch/trunk1/test/instrumented/classes % cp /scratch/trunk1/test/instrumented/../../src/library/scala/runtime/BoxesRunTime.java /scratch/trunk1/test/instrumented/../../src/library/scala/runtime/ScalaRunTime.scala /scratch/trunk1/test/instrumented/library/scala/runtime % patch BoxesRunTime.java /scratch/trunk1/test/instrumented/boxes.patch patching file BoxesRunTime.java % patch ScalaRunTime.scala /scratch/trunk1/test/instrumented/srt.patch patching file ScalaRunTime.scala Hunk #3 succeeded at 63 (offset 23 lines). Hunk #4 succeeded at 78 (offset 23 lines). Hunk #5 succeeded at 81 (offset 23 lines). Hunk #6 succeeded at 96 (offset 23 lines). % /scratch/trunk1/test/instrumented/../../build/pack/bin/scalac -d /scratch/trunk1/test/instrumented/classes /scratch/trunk1/test/instrumented/library/scala/runtime/BoxesRunTime.java /scratch/trunk1/test/instrumented/library/scala/runtime/ScalaRunTime.scala % javac -cp /scratch/trunk1/test/instrumented/../../build/pack/lib/scala-library.jar -d /scratch/trunk1/test/instrumented/classes /scratch/trunk1/test/instrumented/library/scala/runtime/BoxesRunTime.java % cd /scratch/trunk1/test/instrumented/classes % jar cf instrumented.jar . % mv -f instrumented.jar /scratch/trunk1/test/instrumented/../../test/files/speclib /scratch/trunk1/test/files/speclib/instrumented.jar has been created. --- instrumented.jar.desired.sha1 | 1 + src/library/scala/runtime/ScalaRunTime.scala | 11 +- test/files/speclib/instrumented.jar.desired.sha1 | 2 +- test/instrumented/boxes.patch | 29 +++ .../library/scala/runtime/BoxesRunTime.java | 116 ++++++---- .../library/scala/runtime/ScalaRunTime.scala | 255 +++++++++++++-------- test/instrumented/mkinstrumented | 46 ---- test/instrumented/mkinstrumented.sh | 51 +++++ test/instrumented/srt.patch | 23 ++ 9 files changed, 337 insertions(+), 197 deletions(-) create mode 100644 instrumented.jar.desired.sha1 create mode 100644 test/instrumented/boxes.patch delete mode 100755 test/instrumented/mkinstrumented create mode 100755 test/instrumented/mkinstrumented.sh create mode 100644 test/instrumented/srt.patch diff --git a/instrumented.jar.desired.sha1 b/instrumented.jar.desired.sha1 new file mode 100644 index 0000000000..4d31c9e54f --- /dev/null +++ b/instrumented.jar.desired.sha1 @@ -0,0 +1 @@ +6785cf706a8448f8600f06b4c25d1816800422ce ?instrumented.jar diff --git a/src/library/scala/runtime/ScalaRunTime.scala b/src/library/scala/runtime/ScalaRunTime.scala index 951bdd888e..8bc63ae3a0 100644 --- a/src/library/scala/runtime/ScalaRunTime.scala +++ b/src/library/scala/runtime/ScalaRunTime.scala @@ -36,7 +36,16 @@ object ScalaRunTime { case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true case _ => false } - private val tupleNames = 1 to 22 map ("scala.Tuple" + _) toSet + // Avoiding boxing which messes up the specialized tests. Don't ask. 
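
About the "Avoiding boxing" comment above: the old one-liner pushes every Int of the Range through a generic Function1, so each element is boxed on its way into the closure, and the instrumented BoxesRunTime counters used by the specialization tests would register those boxes; the hand-written while loop that follows never boxes. A standalone juxtaposition of the two formulations (same resulting Set, different boxing behaviour):

    object TupleNamesSketch extends App {
      val viaRange: Set[String] =
        (1 to 22).map("scala.Tuple" + _).toSet        // each Int is boxed when passed to the closure

      val viaWhile: Set[String] = {
        var i = 22
        var names: List[String] = Nil
        while (i >= 1) {                              // no closure, no boxing
          names ::= "scala.Tuple" + String.valueOf(i)
          i -= 1
        }
        names.toSet
      }

      assert(viaRange == viaWhile)
      println(viaWhile.size + " tuple class names")
    }
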
+ private val tupleNames = { + var i = 22 + var names: List[String] = Nil + while (i >= 1) { + names ::= ("scala.Tuple" + String.valueOf(i)) + i -= 1 + } + names.toSet + } /** Return the class object representing an unboxed value type, * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler diff --git a/test/files/speclib/instrumented.jar.desired.sha1 b/test/files/speclib/instrumented.jar.desired.sha1 index 68114c2393..27c1e8fc24 100644 --- a/test/files/speclib/instrumented.jar.desired.sha1 +++ b/test/files/speclib/instrumented.jar.desired.sha1 @@ -1 +1 @@ -2546f965f6718b000c4e6ef73559c11084177bd8 ?instrumented.jar +23b6a7aa89b0a8a210ae9b206dfd0998338798c7 ?instrumented.jar diff --git a/test/instrumented/boxes.patch b/test/instrumented/boxes.patch new file mode 100644 index 0000000000..11c5b37aa8 --- /dev/null +++ b/test/instrumented/boxes.patch @@ -0,0 +1,29 @@ +9a10,11 +> /* INSTRUMENTED VERSION */ +> +50a53,61 +> public static int booleanBoxCount = 0; +> public static int characterBoxCount = 0; +> public static int byteBoxCount = 0; +> public static int shortBoxCount = 0; +> public static int integerBoxCount = 0; +> public static int longBoxCount = 0; +> public static int floatBoxCount = 0; +> public static int doubleBoxCount = 0; +> +51a63 +> booleanBoxCount++; +55a68 +> characterBoxCount++; +59a73 +> byteBoxCount++; +63a78 +> shortBoxCount++; +67a83 +> integerBoxCount++; +71a88 +> longBoxCount++; +75a93 +> floatBoxCount++; +79a98 +> doubleBoxCount++; diff --git a/test/instrumented/library/scala/runtime/BoxesRunTime.java b/test/instrumented/library/scala/runtime/BoxesRunTime.java index 797e9f89dd..f06f86f2f2 100644 --- a/test/instrumented/library/scala/runtime/BoxesRunTime.java +++ b/test/instrumented/library/scala/runtime/BoxesRunTime.java @@ -30,9 +30,9 @@ import scala.math.ScalaNumber; * @contributor Stepan Koltsov * @version 2.0 */ public final class BoxesRunTime -{ +{ private static final int CHAR = 0, BYTE = 1, SHORT = 2, INT = 3, LONG = 4, FLOAT = 5, DOUBLE = 6, OTHER = 7; - + private static int typeCode(Object a) { if (a instanceof java.lang.Integer) return INT; if (a instanceof java.lang.Byte) return BYTE; @@ -43,13 +43,13 @@ public final class BoxesRunTime if (a instanceof java.lang.Float) return FLOAT; return OTHER; } - + private static String boxDescription(Object a) { return "" + a.getClass().getSimpleName() + "(" + a + ")"; } - + /* BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... BOXING ... 
BOXING */ - + public static int booleanBoxCount = 0; public static int characterBoxCount = 0; public static int byteBoxCount = 0; @@ -63,58 +63,58 @@ public final class BoxesRunTime booleanBoxCount++; return java.lang.Boolean.valueOf(b); } - + public static java.lang.Character boxToCharacter(char c) { characterBoxCount++; return java.lang.Character.valueOf(c); } - + public static java.lang.Byte boxToByte(byte b) { byteBoxCount++; return java.lang.Byte.valueOf(b); } - + public static java.lang.Short boxToShort(short s) { shortBoxCount++; return java.lang.Short.valueOf(s); } - + public static java.lang.Integer boxToInteger(int i) { integerBoxCount++; return java.lang.Integer.valueOf(i); } - + public static java.lang.Long boxToLong(long l) { longBoxCount++; return java.lang.Long.valueOf(l); } - + public static java.lang.Float boxToFloat(float f) { floatBoxCount++; return java.lang.Float.valueOf(f); } - + public static java.lang.Double boxToDouble(double d) { doubleBoxCount++; // System.out.println("box " + d); // (new Throwable()).printStackTrace(); return java.lang.Double.valueOf(d); } - + /* UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING ... UNBOXING */ - + public static boolean unboxToBoolean(Object b) { return b == null ? false : ((java.lang.Boolean)b).booleanValue(); } - + public static char unboxToChar(Object c) { return c == null ? 0 : ((java.lang.Character)c).charValue(); } - + public static byte unboxToByte(Object b) { return b == null ? 0 : ((java.lang.Byte)b).byteValue(); } - + public static short unboxToShort(Object s) { return s == null ? 0 : ((java.lang.Short)s).shortValue(); } @@ -122,22 +122,22 @@ public final class BoxesRunTime public static int unboxToInt(Object i) { return i == null ? 0 : ((java.lang.Integer)i).intValue(); } - + public static long unboxToLong(Object l) { return l == null ? 0 : ((java.lang.Long)l).longValue(); } - + public static float unboxToFloat(Object f) { return f == null ? 0.0f : ((java.lang.Float)f).floatValue(); } - + public static double unboxToDouble(Object d) { // System.out.println("unbox " + d); return d == null ? 0.0d : ((java.lang.Double)d).doubleValue(); } /* COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... COMPARISON ... 
COMPARISON */ - + private static int eqTypeCode(Number a) { if ((a instanceof java.lang.Integer) || (a instanceof java.lang.Byte)) return INT; if (a instanceof java.lang.Long) return LONG; @@ -146,8 +146,8 @@ public final class BoxesRunTime if (a instanceof java.lang.Float) return FLOAT; return OTHER; } - - public static boolean equals(Object x, Object y) { + + public static boolean equals(Object x, Object y) { if (x == y) return true; return equals2(x, y); } @@ -162,10 +162,10 @@ public final class BoxesRunTime return equalsCharObject((java.lang.Character)x, y); if (x == null) return y == null; - + return x.equals(y); } - + public static boolean equalsNumObject(java.lang.Number xn, Object y) { if (y instanceof java.lang.Number) return equalsNumNum(xn, (java.lang.Number)y); @@ -173,10 +173,10 @@ public final class BoxesRunTime return equalsNumChar(xn, (java.lang.Character)y); if (xn == null) return y == null; - + return xn.equals(y); } - + public static boolean equalsNumNum(java.lang.Number xn, java.lang.Number yn) { int xcode = eqTypeCode(xn); int ycode = eqTypeCode(yn); @@ -195,10 +195,10 @@ public final class BoxesRunTime } if (xn == null) return yn == null; - + return xn.equals(yn); } - + public static boolean equalsCharObject(java.lang.Character xc, Object y) { if (y instanceof java.lang.Character) return xc.charValue() == ((java.lang.Character)y).charValue(); @@ -206,7 +206,7 @@ public final class BoxesRunTime return equalsNumChar((java.lang.Number)y, xc); if (xc == null) return y == null; - + return xc.equals(y); } @@ -224,11 +224,11 @@ public final class BoxesRunTime default: if (xn == null) return yc == null; - + return xn.equals(yc); } } - + /** Hashcode algorithm is driven by the requirements imposed * by primitive equality semantics, namely that equal objects * have equal hashCodes. The first priority are the integral/char @@ -262,16 +262,16 @@ public final class BoxesRunTime else return n.hashCode(); } public static int hashFromDouble(java.lang.Double n) { - int iv = n.intValue(); + int iv = n.intValue(); double dv = n.doubleValue(); if (iv == dv) return iv; - + long lv = n.longValue(); if (lv == dv) return java.lang.Long.valueOf(lv).hashCode(); else return n.hashCode(); } public static int hashFromFloat(java.lang.Float n) { - int iv = n.intValue(); + int iv = n.intValue(); float fv = n.floatValue(); if (iv == fv) return iv; @@ -289,9 +289,9 @@ public final class BoxesRunTime if (a instanceof Number) return hashFromNumber((Number)a); else return a.hashCode(); } - + /* OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... OPERATORS ... 
OPERATORS */ - + /** arg1 + arg2 */ public static Object add(Object arg1, Object arg2) throws NoSuchMethodException { int code1 = typeCode(arg1); @@ -518,7 +518,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + /** -arg */ public static Object negate(Object arg) throws NoSuchMethodException { int code = typeCode(arg); @@ -540,7 +540,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + /** +arg */ public static Object positive(Object arg) throws NoSuchMethodException { int code = typeCode(arg); @@ -650,7 +650,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + /** ~arg */ public static Object complement(Object arg) throws NoSuchMethodException { int code = typeCode(arg); @@ -664,7 +664,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + /** !arg */ public static Object takeNot(Object arg) throws NoSuchMethodException { if (arg instanceof Boolean) { @@ -672,15 +672,15 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + public static Object testEqual(Object arg1, Object arg2) throws NoSuchMethodException { return boxToBoolean(arg1 == arg2); } - + public static Object testNotEqual(Object arg1, Object arg2) throws NoSuchMethodException { return boxToBoolean(arg1 != arg2); } - + public static Object testLessThan(Object arg1, Object arg2) throws NoSuchMethodException { int code1 = typeCode(arg1); int code2 = typeCode(arg2); @@ -707,7 +707,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + public static Object testLessOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { int code1 = typeCode(arg1); int code2 = typeCode(arg2); @@ -734,7 +734,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + public static Object testGreaterOrEqualThan(Object arg1, Object arg2) throws NoSuchMethodException { int code1 = typeCode(arg1); int code2 = typeCode(arg2); @@ -761,7 +761,7 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + public static Object testGreaterThan(Object arg1, Object arg2) throws NoSuchMethodException { int code1 = typeCode(arg1); int code2 = typeCode(arg2); @@ -788,7 +788,25 @@ public final class BoxesRunTime } throw new NoSuchMethodException(); } - + + public static boolean isBoxedNumberOrBoolean(Object arg) { + if (arg instanceof java.lang.Boolean) + return true; + else + return isBoxedNumber(arg); + } + public static boolean isBoxedNumber(Object arg) { + return ( + (arg instanceof java.lang.Integer) + || (arg instanceof java.lang.Long) + || (arg instanceof java.lang.Double) + || (arg instanceof java.lang.Float) + || (arg instanceof java.lang.Short) + || (arg instanceof java.lang.Character) + || (arg instanceof java.lang.Byte) + ); + } + /** arg.toChar */ public static java.lang.Character toCharacter(Object arg) throws NoSuchMethodException { if (arg instanceof java.lang.Integer) return boxToCharacter((char)unboxToInt(arg)); @@ -872,5 +890,5 @@ public final class BoxesRunTime if (arg instanceof java.lang.Short) return boxToDouble((double)unboxToShort(arg)); throw new NoSuchMethodException(); } - + } diff --git a/test/instrumented/library/scala/runtime/ScalaRunTime.scala b/test/instrumented/library/scala/runtime/ScalaRunTime.scala index a8a74dd8ab..9eb93a418d 100644 --- a/test/instrumented/library/scala/runtime/ScalaRunTime.scala +++ b/test/instrumented/library/scala/runtime/ScalaRunTime.scala @@ -6,70 +6,102 @@ ** |/ ** \* */ - +package scala.runtime /* 
INSTRUMENTED VERSION */ -package scala.runtime - -import scala.reflect.ClassManifest -import scala.collection.{ Seq, IndexedSeq, TraversableView } +import scala.collection.{ Seq, IndexedSeq, TraversableView, AbstractIterator } import scala.collection.mutable.WrappedArray -import scala.collection.immutable.{ NumericRange, List, Stream, Nil, :: } +import scala.collection.immutable.{ StringLike, NumericRange, List, Stream, Nil, :: } import scala.collection.generic.{ Sorted } -import scala.xml.{ Node, MetaData } import scala.util.control.ControlThrowable +import scala.xml.{ Node, MetaData } + +import java.lang.Double.doubleToLongBits import java.lang.reflect.{ Modifier, Method => JMethod } -/* The object ScalaRunTime provides ... +/** The object ScalaRunTime provides support methods required by + * the scala runtime. All these methods should be considered + * outside the API and subject to change or removal without notice. */ object ScalaRunTime { def isArray(x: AnyRef): Boolean = isArray(x, 1) - def isArray(x: Any, atLevel: Int): Boolean = - x != null && isArrayClass(x.asInstanceOf[AnyRef].getClass, atLevel) + def isArray(x: Any, atLevel: Int): Boolean = + x != null && isArrayClass(x.getClass, atLevel) private def isArrayClass(clazz: Class[_], atLevel: Int): Boolean = clazz.isArray && (atLevel == 1 || isArrayClass(clazz.getComponentType, atLevel - 1)) - def isValueClass(clazz: Class[_]) = clazz.isPrimitive() - + def isValueClass(clazz: Class[_]) = clazz.isPrimitive() var arrayApplyCount = 0 var arrayUpdateCount = 0 - + + def isTuple(x: Any) = tupleNames(x.getClass.getName) + def isAnyVal(x: Any) = x match { + case _: Byte | _: Short | _: Char | _: Int | _: Long | _: Float | _: Double | _: Boolean | _: Unit => true + case _ => false + } + // Avoiding boxing which messes up the specialized tests. Don't ask. + private val tupleNames = { + var i = 22 + var names: List[String] = Nil + while (i >= 1) { + names ::= ("scala.Tuple" + String.valueOf(i)) + i -= 1 + } + names.toSet + } + + /** Return the class object representing an unboxed value type, + * e.g. classOf[int], not classOf[java.lang.Integer]. The compiler + * rewrites expressions like 5.getClass to come here. 
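
As an aside on the anyValClass doc comment above, the user-visible effect is that getClass on an unboxed value yields the primitive class rather than the boxed wrapper. A tiny illustration in plain user code, no compiler internals involved:

    object PrimitiveGetClass extends App {
      println(5.getClass)            // prints: int  (the primitive class)
      println((5: Any).getClass)     // prints: class java.lang.Integer  (already boxed)
      println(true.getClass)         // prints: boolean
    }
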
+ */ + def anyValClass[T <: AnyVal](value: T): Class[T] = (value match { + case x: Byte => java.lang.Byte.TYPE + case x: Short => java.lang.Short.TYPE + case x: Char => java.lang.Character.TYPE + case x: Int => java.lang.Integer.TYPE + case x: Long => java.lang.Long.TYPE + case x: Float => java.lang.Float.TYPE + case x: Double => java.lang.Double.TYPE + case x: Boolean => java.lang.Boolean.TYPE + case x: Unit => java.lang.Void.TYPE + }).asInstanceOf[Class[T]] + /** Retrieve generic array element */ def array_apply(xs: AnyRef, idx: Int): Any = { arrayApplyCount += 1 xs match { - case x: Array[AnyRef] => x(idx).asInstanceOf[Any] - case x: Array[Int] => x(idx).asInstanceOf[Any] - case x: Array[Double] => x(idx).asInstanceOf[Any] - case x: Array[Long] => x(idx).asInstanceOf[Any] - case x: Array[Float] => x(idx).asInstanceOf[Any] - case x: Array[Char] => x(idx).asInstanceOf[Any] - case x: Array[Byte] => x(idx).asInstanceOf[Any] - case x: Array[Short] => x(idx).asInstanceOf[Any] - case x: Array[Boolean] => x(idx).asInstanceOf[Any] - case x: Array[Unit] => x(idx).asInstanceOf[Any] - case null => throw new NullPointerException - } + case x: Array[AnyRef] => x(idx).asInstanceOf[Any] + case x: Array[Int] => x(idx).asInstanceOf[Any] + case x: Array[Double] => x(idx).asInstanceOf[Any] + case x: Array[Long] => x(idx).asInstanceOf[Any] + case x: Array[Float] => x(idx).asInstanceOf[Any] + case x: Array[Char] => x(idx).asInstanceOf[Any] + case x: Array[Byte] => x(idx).asInstanceOf[Any] + case x: Array[Short] => x(idx).asInstanceOf[Any] + case x: Array[Boolean] => x(idx).asInstanceOf[Any] + case x: Array[Unit] => x(idx).asInstanceOf[Any] + case null => throw new NullPointerException + } } /** update generic array element */ def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { arrayUpdateCount += 1 xs match { - case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] - case x: Array[Int] => x(idx) = value.asInstanceOf[Int] - case x: Array[Double] => x(idx) = value.asInstanceOf[Double] - case x: Array[Long] => x(idx) = value.asInstanceOf[Long] - case x: Array[Float] => x(idx) = value.asInstanceOf[Float] - case x: Array[Char] => x(idx) = value.asInstanceOf[Char] - case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] - case x: Array[Short] => x(idx) = value.asInstanceOf[Short] - case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] - case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] - case null => throw new NullPointerException - } + case x: Array[AnyRef] => x(idx) = value.asInstanceOf[AnyRef] + case x: Array[Int] => x(idx) = value.asInstanceOf[Int] + case x: Array[Double] => x(idx) = value.asInstanceOf[Double] + case x: Array[Long] => x(idx) = value.asInstanceOf[Long] + case x: Array[Float] => x(idx) = value.asInstanceOf[Float] + case x: Array[Char] => x(idx) = value.asInstanceOf[Char] + case x: Array[Byte] => x(idx) = value.asInstanceOf[Byte] + case x: Array[Short] => x(idx) = value.asInstanceOf[Short] + case x: Array[Boolean] => x(idx) = value.asInstanceOf[Boolean] + case x: Array[Unit] => x(idx) = value.asInstanceOf[Unit] + case null => throw new NullPointerException + } } /** Get generic array length */ @@ -85,7 +117,7 @@ object ScalaRunTime { case x: Array[Boolean] => x.length case x: Array[Unit] => x.length case null => throw new NullPointerException - } + } def array_clone(xs: AnyRef): AnyRef = xs match { case x: Array[AnyRef] => ArrayRuntime.cloneArray(x) @@ -122,7 +154,7 @@ object ScalaRunTime { } arr } - + // Java bug: 
http://bugs.sun.com/bugdatabase/view_bug.do?bug_id=4071957 // More background at ticket #2318. def ensureAccessible(m: JMethod): JMethod = { @@ -130,10 +162,10 @@ object ScalaRunTime { try m setAccessible true catch { case _: SecurityException => () } } - m + m } - def checkInitialized[T <: AnyRef](x: T): T = + def checkInitialized[T <: AnyRef](x: T): T = if (x == null) throw new UninitializedError else x abstract class Try[+A] { @@ -143,9 +175,9 @@ object ScalaRunTime { def Try[A](block: => A): Try[A] = new Try[A] with Runnable { private var result: A = _ - private var exception: Throwable = + private var exception: Throwable = try { run() ; null } - catch { + catch { case e: ControlThrowable => throw e // don't catch non-local returns etc case e: Throwable => e } @@ -167,27 +199,26 @@ object ScalaRunTime { def _toString(x: Product): String = x.productIterator.mkString(x.productPrefix + "(", ",", ")") - - def _hashCode(x: Product): Int = { - import scala.util.MurmurHash._ - val arr = x.productArity - var h = startHash(arr) - var c = startMagicA - var k = startMagicB - var i = 0 - while (i < arr) { - val elem = x.productElement(i) - h = extendHash(h, if (elem == null) 0 else elem.##, c, k) - c = nextMagicA(c) - k = nextMagicB(k) - i += 1 + + def _hashCode(x: Product): Int = scala.util.MurmurHash3.productHash(x) + + /** A helper for case classes. */ + def typedProductIterator[T](x: Product): Iterator[T] = { + new AbstractIterator[T] { + private var c: Int = 0 + private val cmax = x.productArity + def hasNext = c < cmax + def next() = { + val result = x.productElement(c) + c += 1 + result.asInstanceOf[T] + } } - finalizeHash(h) } /** Fast path equality method for inlining; used when -optimise is set. */ - @inline def inlinedEquals(x: Object, y: Object): Boolean = + @inline def inlinedEquals(x: Object, y: Object): Boolean = if (x eq y) true else if (x eq null) false else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.equalsNumObject(x.asInstanceOf[java.lang.Number], y) @@ -198,20 +229,21 @@ object ScalaRunTime { case y: Product if x.productArity == y.productArity => x.productIterator sameElements y.productIterator case _ => false } - + // hashcode ----------------------------------------------------------- // // Note that these are the implementations called by ##, so they // must not call ## themselves. - + @inline def hash(x: Any): Int = - if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number]) + if (x == null) 0 + else if (x.isInstanceOf[java.lang.Number]) BoxesRunTime.hashFromNumber(x.asInstanceOf[java.lang.Number]) else x.hashCode - + @inline def hash(dv: Double): Int = { val iv = dv.toInt if (iv == dv) return iv - + val lv = dv.toLong if (lv == dv) return lv.hashCode @@ -221,31 +253,27 @@ object ScalaRunTime { @inline def hash(fv: Float): Int = { val iv = fv.toInt if (iv == fv) return iv - + val lv = fv.toLong - if (lv == fv) return lv.hashCode + if (lv == fv) return hash(lv) else fv.hashCode } @inline def hash(lv: Long): Int = { - val iv = lv.toInt - if (iv == lv) iv else lv.hashCode + val low = lv.toInt + val lowSign = low >>> 31 + val high = (lv >>> 32).toInt + low ^ (high + lowSign) } + @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x) + + // The remaining overloads are here for completeness, but the compiler + // inlines these definitions directly so they're not generally used. 
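
The new hash(lv: Long) above has to agree with the Int hash whenever the Long is numerically equal to an Int, because 5L == 5 in Scala and ## must be consistent with ==. A standalone check of that property; the formula is copied from the hunk above, while the harness around it is not part of the patch:

    object LongHashCheck extends App {
      def hashOfLong(lv: Long): Int = {
        val low     = lv.toInt
        val lowSign = low >>> 31
        val high    = (lv >>> 32).toInt
        low ^ (high + lowSign)
      }
      for (i <- List(0, 1, -1, 42, Int.MaxValue, Int.MinValue))
        assert(hashOfLong(i.toLong) == i, "mismatch for " + i)
      println("hash(l: Long) agrees with hash(i: Int) whenever l == i")
    }
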
@inline def hash(x: Int): Int = x @inline def hash(x: Short): Int = x.toInt @inline def hash(x: Byte): Int = x.toInt @inline def hash(x: Char): Int = x.toInt - @inline def hash(x: Boolean): Int = x.hashCode + @inline def hash(x: Boolean): Int = if (x) true.hashCode else false.hashCode @inline def hash(x: Unit): Int = 0 - - @inline def hash(x: Number): Int = runtime.BoxesRunTime.hashFromNumber(x) - - /** XXX Why is there one boxed implementation in here? It would seem - * we should have all the numbers or none of them. - */ - @inline def hash(x: java.lang.Long): Int = { - val iv = x.intValue - if (iv == x.longValue) iv else x.hashCode - } /** A helper method for constructing case class equality methods, * because existential types get in the way of a clean outcome and @@ -263,17 +291,13 @@ object ScalaRunTime { * called on null and (b) depending on the apparent type of an * array, toString may or may not print it in a human-readable form. * - * @param arg the value to stringify - * @return a string representation of arg - * - */ + * @param arg the value to stringify + * @return a string representation of arg. + */ def stringOf(arg: Any): String = stringOf(arg, scala.Int.MaxValue) - def stringOf(arg: Any, maxElements: Int): String = { + def stringOf(arg: Any, maxElements: Int): String = { def isScalaClass(x: AnyRef) = Option(x.getClass.getPackage) exists (_.getName startsWith "scala.") - - def isTuple(x: AnyRef) = - x.getClass.getName matches """^scala\.Tuple(\d+).*""" // When doing our own iteration is dangerous def useOwnToString(x: Any) = x match { @@ -283,8 +307,8 @@ object ScalaRunTime { case _: Range | _: NumericRange[_] => true // Sorted collections to the wrong thing (for us) on iteration - ticket #3493 case _: Sorted[_, _] => true - // StringBuilder(a, b, c) is not so attractive - case _: StringBuilder => true + // StringBuilder(a, b, c) and similar not so attractive + case _: StringLike[_] => true // Don't want to evaluate any elements in a view case _: TraversableView[_, _] => true // Don't want to a) traverse infinity or b) be overly helpful with peoples' custom @@ -299,14 +323,27 @@ object ScalaRunTime { case (k, v) => inner(k) + " -> " + inner(v) case _ => inner(arg) } - // The recursively applied attempt to prettify Array printing + + // Special casing Unit arrays, the value class which uses a reference array type. + def arrayToString(x: AnyRef) = { + if (x.getClass.getComponentType == classOf[BoxedUnit]) + 0 until (array_length(x) min maxElements) map (_ => "()") mkString ("Array(", ", ", ")") + else + WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") + } + + // The recursively applied attempt to prettify Array printing. + // Note that iterator is used if possible and foreach is used as a + // last resort, because the parallel collections "foreach" in a + // random order even on sequences. 
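
The special cases spelled out in the comments above (own toString for ranges, views, and collections without a definite size; iterator rather than foreach; a cap of maxElements) all exist so that printing a value can never force or endlessly traverse it. A small usage illustration against the public scala.runtime.ScalaRunTime; the exact rendered strings depend on the library version, so the comments only describe the intent:

    object StringOfDemo extends App {
      import scala.runtime.ScalaRunTime.stringOf
      println(stringOf(Array(1, 2, 3)))   // a readable Array(...) instead of the JVM's [I@... default
      println(stringOf(Map(1 -> "a")))    // map entries rendered as k -> v
      println(stringOf(1 to 5))           // Range falls under useOwnToString, so its own toString is used
    }
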
def inner(arg: Any): String = arg match { case null => "null" case "" => "\"\"" case x: String => if (x.head.isWhitespace || x.last.isWhitespace) "\"" + x + "\"" else x - case x if useOwnToString(x) => x.toString - case x: AnyRef if isArray(x) => WrappedArray make x take maxElements map inner mkString ("Array(", ", ", ")") - case x: collection.Map[_, _] => x take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") + case x if useOwnToString(x) => x toString + case x: AnyRef if isArray(x) => arrayToString(x) + case x: collection.Map[_, _] => x.iterator take maxElements map mapInner mkString (x.stringPrefix + "(", ", ", ")") + case x: Iterable[_] => x.iterator take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Traversable[_] => x take maxElements map inner mkString (x.stringPrefix + "(", ", ", ")") case x: Product1[_] if isTuple(x) => "(" + inner(x._1) + ",)" // that special trailing comma case x: Product if isTuple(x) => x.productIterator map inner mkString ("(", ",", ")") @@ -315,13 +352,31 @@ object ScalaRunTime { // The try/catch is defense against iterables which aren't actually designed // to be iterated, such as some scala.tools.nsc.io.AbstractFile derived classes. - val s = - try inner(arg) - catch { - case _: StackOverflowError | _: UnsupportedOperationException => arg.toString - } - + try inner(arg) + catch { + case _: StackOverflowError | _: UnsupportedOperationException | _: AssertionError => "" + arg + } + } + + /** stringOf formatted for use in a repl result. */ + def replStringOf(arg: Any, maxElements: Int): String = { + val s = stringOf(arg, maxElements) val nl = if (s contains "\n") "\n" else "" - nl + s + "\n" + + nl + s + "\n" + } + private[scala] def checkZip(what: String, coll1: TraversableOnce[_], coll2: TraversableOnce[_]) { + if (sys.props contains "scala.debug.zip") { + val xs = coll1.toIndexedSeq + val ys = coll2.toIndexedSeq + if (xs.length != ys.length) { + Console.err.println( + "Mismatched zip in " + what + ":\n" + + " this: " + xs.mkString(", ") + "\n" + + " that: " + ys.mkString(", ") + ) + (new Exception).getStackTrace.drop(2).take(10).foreach(println) + } + } } } diff --git a/test/instrumented/mkinstrumented b/test/instrumented/mkinstrumented deleted file mode 100755 index a87e8cb94f..0000000000 --- a/test/instrumented/mkinstrumented +++ /dev/null @@ -1,46 +0,0 @@ -# -# -# Used to compile a jar with instrumented versions of certain classes. -# - - - - -if [ $# -ne 1 ] -then - echo "Must provide build dir ('target' or 'build')." - exit 1 -fi - - -BUILDDIR=$1 -TOPDIR=../.. -SCALAC=$TOPDIR/$BUILDDIR/pack/bin/scalac -SRC_DIR=library/ -SCALALIB=$TOPDIR/$BUILDDIR/pack/lib/scala-library.jar -CLASSDIR=classes/ -ARTIFACT=instrumented.jar - - -# compile it -rm -rf $CLASSDIR -mkdir $CLASSDIR -JSOURCES=`find $SRC_DIR -name "*.java" -print` -SOURCES=`find $SRC_DIR \( -name "*.scala" -o -name "*.java" \) -print` -echo $SOURCES -$SCALAC -d $CLASSDIR $SOURCES -javac -cp $SCALALIB -d $CLASSDIR $JSOURCES - - -# jar it up -rm $ARTIFACT -cd $CLASSDIR -jar cf $ARTIFACT . -mv $ARTIFACT ../ -cd .. - - - - - - diff --git a/test/instrumented/mkinstrumented.sh b/test/instrumented/mkinstrumented.sh new file mode 100755 index 0000000000..d734dd2e00 --- /dev/null +++ b/test/instrumented/mkinstrumented.sh @@ -0,0 +1,51 @@ +#/bin/sh +# +# Used to compile a jar with instrumented versions of certain classes. +# + +set -e + +run () { + echo "% $@" + "$@" +} + +if [ $# -ne 1 ] +then + echo "Must provide build dir ('target' or 'build')." 
+ exit 1 +fi + +scriptDir=$(cd $(dirname $0) && pwd) + +TOPDIR="$scriptDir/../.." +RUNTIME="$TOPDIR/src/library/scala/runtime" +SOURCES="$RUNTIME/BoxesRunTime.java $RUNTIME/ScalaRunTime.scala" +SCALAC=$TOPDIR/$1/pack/bin/scalac +SRC_DIR="$scriptDir/library/scala/runtime" +SCALALIB=$TOPDIR/$1/pack/lib/scala-library.jar +CLASSDIR="$scriptDir/classes" +ARTIFACT=instrumented.jar +DESTINATION="$TOPDIR/test/files/speclib" + +[[ -x "$SCALAC" ]] || exit 1; + +# compile it +run rm -rf $CLASSDIR && mkdir $CLASSDIR +run cp $SOURCES $SRC_DIR +( cd $SRC_DIR && run patch BoxesRunTime.java $scriptDir/boxes.patch && run patch ScalaRunTime.scala $scriptDir/srt.patch ) + +ORIG=$(find $SRC_DIR -name '*.orig') +[[ -z "$ORIG" ]] || rm -f $ORIG + +JSOURCES=$(find $SRC_DIR -name "*.java" -print) +SOURCES=$(find $SRC_DIR -type f -print) +# echo $SOURCES +run $SCALAC -d $CLASSDIR $SOURCES +run javac -cp $SCALALIB -d $CLASSDIR $JSOURCES + +# jar it up +run cd $CLASSDIR +run jar cf $ARTIFACT . +run mv -f $ARTIFACT "$DESTINATION" +echo "$(cd "$DESTINATION" && pwd)/$ARTIFACT has been created." \ No newline at end of file diff --git a/test/instrumented/srt.patch b/test/instrumented/srt.patch new file mode 100644 index 0000000000..2f472ff1c0 --- /dev/null +++ b/test/instrumented/srt.patch @@ -0,0 +1,23 @@ +9a10,11 +> /* INSTRUMENTED VERSION */ +> +33a36,38 +> var arrayApplyCount = 0 +> var arrayUpdateCount = 0 +> +35c40,42 +< def array_apply(xs: AnyRef, idx: Int): Any = xs match { +--- +> def array_apply(xs: AnyRef, idx: Int): Any = { +> arrayApplyCount += 1 +> xs match { +47a55 +> } +50c58,60 +< def array_update(xs: AnyRef, idx: Int, value: Any): Unit = xs match { +--- +> def array_update(xs: AnyRef, idx: Int, value: Any): Unit = { +> arrayUpdateCount += 1 +> xs match { +62a73 +> } -- cgit v1.2.3 From 97ec16850b152e6e03aba351cc918c66583362f3 Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Tue, 14 Feb 2012 21:59:43 +0100 Subject: Disables typechecking that precedes reflective compilation Previously toolboxes used to typecheck their inputs before compiling. Actually, the initial demo by Martin first typechecked the reified tree, then ran it, which typechecked it again, and only then launched the reflective compiler. However, as observed in https://issues.scala-lang.org/browse/SI-5464 current implementation of typechecking is not always idempotent. That's why we cannot allow inputs of toolboxes to be typechecked, at least not until the aforementioned issue is closed. This is not only a convention but also a check inside compileExpr. 
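
In usage terms, the new contract is that the reified tree reaches the toolbox untyped and gets typechecked exactly once, during reflective compilation (compileExpr now checks this). Below is a sketch of the resulting pattern, using the same API names as the tests in this series; the concrete test updates are in the diff that follows:

    import scala.tools.nsc.reporters._
    import scala.tools.nsc.Settings
    import reflect.runtime.Mirror.ToolBox

    object ToolBoxSketch extends App {
      val code = scala.reflect.Code.lift {
        val xs = List(1, 2, 3)
        println(xs.sum)
      }
      val reporter = new ConsoleReporter(new Settings)
      val toolbox  = new ToolBox(reporter)
      toolbox.runExpr(code.tree)   // the tree goes in untyped; no separate typeCheck step beforehand
    }
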
--- .../scala/reflect/internal/Importers.scala | 2 +- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 40 ++++---- test/files/neg/reify_ann2a.scala | 2 +- test/files/neg/reify_ann2b.scala | 2 +- test/files/run/reify_ann1a.scala | 2 +- test/files/run/reify_ann1b.scala | 2 +- test/files/run/reify_anonymous.scala | 3 +- test/files/run/reify_classfileann_a.scala | 2 +- test/files/run/reify_closure1.scala | 3 +- test/files/run/reify_closure2a.scala | 3 +- test/files/run/reify_closure3a.scala | 3 +- test/files/run/reify_closure4a.scala | 3 +- test/files/run/reify_closure5a.scala | 3 +- test/files/run/reify_closure6.scala | 3 +- test/files/run/reify_closure7.scala | 3 +- test/files/run/reify_closure8a.scala | 3 +- test/files/run/reify_closures10.scala | 3 +- test/files/run/reify_complex.scala | 3 +- test/files/run/reify_extendbuiltins.scala | 3 +- test/files/run/reify_for1.scala | 3 +- test/files/run/reify_fors.check | 5 + test/files/run/reify_fors.scala | 106 ++++++++++++++++++++ test/files/run/reify_generic.scala | 3 +- test/files/run/reify_generic2.scala | 3 +- test/files/run/reify_getter.scala | 3 +- test/files/run/reify_implicits.scala | 3 +- test/files/run/reify_inheritance.scala | 3 +- test/files/run/reify_inner1.scala | 3 +- test/files/run/reify_inner2.scala | 3 +- test/files/run/reify_inner3.scala | 3 +- test/files/run/reify_inner4.scala | 3 +- test/files/run/reify_maps.check | 4 + test/files/run/reify_maps.scala | 25 +++++ test/files/run/reify_printf.scala | 3 +- test/files/run/reify_sort.scala | 3 +- test/files/run/reify_sort1.scala | 3 +- test/files/run/reify_this.scala | 3 +- test/files/run/reify_timeofday.check | 1 + test/files/run/reify_timeofday.scala | 47 +++++++++ test/files/run/reify_varargs.scala | 3 +- test/files/run/t5229_1.scala | 3 +- test/files/run/t5229_2.check | 2 +- test/files/run/t5229_2.scala | 3 +- test/files/run/t5230.check | 2 +- test/files/run/t5230.scala | 3 +- test/files/run/t5258a.scala | 3 +- test/files/run/t5266_1.check | 2 +- test/files/run/t5266_1.scala | 3 +- test/files/run/t5266_2.check | 2 +- test/files/run/t5266_2.scala | 3 +- test/files/run/t5269.scala | 3 +- test/files/run/t5270.scala | 3 +- test/files/run/t5271_1.check | 11 +++ test/files/run/t5271_1.scala | 3 +- test/files/run/t5271_2.check | 13 ++- test/files/run/t5271_2.scala | 3 +- test/files/run/t5271_3.check | 20 +++- test/files/run/t5271_3.scala | 3 +- test/files/run/t5271_4.scala | 3 +- test/files/run/t5272_1.scala | 3 +- test/files/run/t5272_2.scala | 3 +- test/files/run/t5273_1.scala | 3 +- test/files/run/t5273_2a.scala | 3 +- test/files/run/t5273_2b.scala | 3 +- test/files/run/t5274_1.scala | 3 +- test/files/run/t5274_2.scala | 3 +- test/files/run/t5275.scala | 3 +- test/files/run/t5276_1a.scala | 3 +- test/files/run/t5276_1b.scala | 5 +- test/files/run/t5276_2a.scala | 3 +- test/files/run/t5276_2b.scala | 3 +- test/files/run/t5277_1.scala | 3 +- test/files/run/t5277_2.scala | 3 +- test/files/run/t5279.scala | 3 +- test/files/run/t5334_1.check | 1 - test/files/run/t5334_1.scala | 4 +- test/files/run/t5334_2.check | 1 - test/files/run/t5334_2.scala | 4 +- test/files/run/t5335.scala | 3 +- test/pending/run/reify_addressbook.scala | 3 +- test/pending/run/reify_brainf_ck.scala | 3 +- test/pending/run/reify_callccinterpreter.scala | 3 +- test/pending/run/reify_classfileann_b.scala | 8 +- test/pending/run/reify_closure2b.scala | 3 +- test/pending/run/reify_closure3b.scala | 3 +- test/pending/run/reify_closure4b.scala | 3 +- test/pending/run/reify_closure5b.scala | 3 +- 
test/pending/run/reify_closure8b.scala | 3 +- test/pending/run/reify_closure9a.scala | 3 +- test/pending/run/reify_closure9b.scala | 3 +- test/pending/run/reify_closures11.scala | 3 +- test/pending/run/reify_csv.scala | 3 +- test/pending/run/reify_fors.check | 5 - test/pending/run/reify_fors.scala | 107 --------------------- test/pending/run/reify_gadts.scala | 3 +- test/pending/run/reify_lazyevaluation.scala | 3 +- test/pending/run/reify_maps.check | 4 - test/pending/run/reify_maps.scala | 26 ----- test/pending/run/reify_properties.scala | 3 +- test/pending/run/reify_simpleinterpreter.scala | 3 +- test/pending/run/reify_timeofday.check | 1 - test/pending/run/reify_timeofday.scala | 48 --------- test/pending/run/t5258b.scala | 3 +- test/pending/run/t5258c.scala | 3 +- test/pending/run/t5271_1.check | 0 test/pending/run/t5271_1.scala | 13 +++ test/pending/run/t5271_2.check | 1 + test/pending/run/t5271_2.scala | 15 +++ test/pending/run/t5271_3.check | 1 + test/pending/run/t5271_3.scala | 16 +++ test/pending/run/t5418.scala | 3 +- 111 files changed, 388 insertions(+), 383 deletions(-) create mode 100644 test/files/run/reify_fors.check create mode 100644 test/files/run/reify_fors.scala create mode 100644 test/files/run/reify_maps.check create mode 100644 test/files/run/reify_maps.scala create mode 100644 test/files/run/reify_timeofday.check create mode 100644 test/files/run/reify_timeofday.scala delete mode 100644 test/pending/run/reify_fors.check delete mode 100644 test/pending/run/reify_fors.scala delete mode 100644 test/pending/run/reify_maps.check delete mode 100644 test/pending/run/reify_maps.scala delete mode 100644 test/pending/run/reify_timeofday.check delete mode 100644 test/pending/run/reify_timeofday.scala create mode 100644 test/pending/run/t5271_1.check create mode 100644 test/pending/run/t5271_1.scala create mode 100644 test/pending/run/t5271_2.check create mode 100644 test/pending/run/t5271_2.scala create mode 100644 test/pending/run/t5271_3.check create mode 100644 test/pending/run/t5271_3.scala diff --git a/src/compiler/scala/reflect/internal/Importers.scala b/src/compiler/scala/reflect/internal/Importers.scala index c232e3b7c1..1ae4f755ed 100644 --- a/src/compiler/scala/reflect/internal/Importers.scala +++ b/src/compiler/scala/reflect/internal/Importers.scala @@ -286,7 +286,7 @@ trait Importers { self: SymbolTable => new Modifiers(mods.flags, importName(mods.privateWithin), mods.annotations map importTree) def importImportSelector(sel: from.ImportSelector): ImportSelector = - new ImportSelector(importName(sel.name), sel.namePos, importName(sel.rename), sel.renamePos) + new ImportSelector(importName(sel.name), sel.namePos, if (sel.rename != null) importName(sel.rename) else null, sel.renamePos) def importTree(tree: from.Tree): Tree = { val mytree = tree match { diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 880c68eaa0..f52662ce6f 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -64,7 +64,7 @@ trait ToolBoxes extends { self: Universe => obj setInfo obj.moduleClass.tpe val meth = obj.moduleClass.newMethod(newTermName(wrapperMethodName)) def makeParam(fv: Symbol) = meth.newValueParameter(fv.name.toTermName) setInfo fv.tpe - meth setInfo MethodType(fvs map makeParam, expr.tpe) + meth setInfo MethodType(fvs map makeParam, AnyClass.tpe) minfo.decls enter meth trace("wrapping ")(defOwner(expr) -> meth) val methdef = DefDef(meth, expr 
changeOwner (defOwner(expr) -> meth)) @@ -94,6 +94,20 @@ trait ToolBoxes extends { self: Universe => } def compileExpr(expr: Tree, fvs: List[Symbol]): String = { + // Previously toolboxes used to typecheck their inputs before compiling. + // Actually, the initial demo by Martin first typechecked the reified tree, + // then ran it, which typechecked it again, and only then launched the + // reflective compiler. + // + // However, as observed in https://issues.scala-lang.org/browse/SI-5464 + // current implementation typechecking is not always idempotent. + // That's why we cannot allow inputs of toolboxes to be typechecked, + // at least not until the aforementioned issue is closed. + val typed = expr filter (t => t.tpe != null && t.tpe != NoType && !t.isInstanceOf[TypeTree]) + if (!typed.isEmpty) { + throw new Error("cannot compile trees that are already typed") + } + val mdef = wrapInObject(expr, fvs) val pdef = wrapInPackage(mdef) val unit = wrapInCompilationUnit(pdef) @@ -106,7 +120,6 @@ trait ToolBoxes extends { self: Universe => jclazz.getDeclaredMethods.find(_.getName == name).get def runExpr(expr: Tree): Any = { - val etpe = expr.tpe val fvs = (expr filter isFree map (_.symbol)).distinct reporter.reset() @@ -181,19 +194,13 @@ trait ToolBoxes extends { self: Universe => lazy val classLoader = new AbstractFileClassLoader(virtualDirectory, defaultReflectiveClassLoader) - private def importAndTypeCheck(tree: rm.Tree, expectedType: rm.Type): compiler.Tree = { + def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = { + if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType) val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree]) val pt: compiler.Type = importer.importType(expectedType.asInstanceOf[Type]) -// val typer = compiler.typer.atOwner(ctree, if (owner.isModule) cowner.moduleClass else cowner) val ttree: compiler.Tree = compiler.typedTopLevelExpr(ctree, pt) - ttree - } - - def typeCheck(tree: rm.Tree, expectedType: rm.Type): rm.Tree = { - if (compiler.settings.verbose.value) println("typing "+tree+", pt = "+expectedType) - val ttree = importAndTypeCheck(tree, expectedType) - val ettree = exporter.importTree(ttree).asInstanceOf[rm.Tree] - ettree + val rmttree = exporter.importTree(ttree).asInstanceOf[rm.Tree] + rmttree } def typeCheck(tree: rm.Tree): rm.Tree = @@ -202,11 +209,10 @@ trait ToolBoxes extends { self: Universe => def showAttributed(tree: rm.Tree, printTypes: Boolean = true, printIds: Boolean = true, printKinds: Boolean = false): String = compiler.showAttributed(importer.importTree(tree.asInstanceOf[Tree]), printTypes, printIds, printKinds) - def runExpr(tree: rm.Tree, expectedType: rm.Type): Any = { - val ttree = importAndTypeCheck(tree, expectedType) - compiler.runExpr(ttree) + def runExpr(tree: rm.Tree): Any = { + if (compiler.settings.verbose.value) println("running "+tree) + val ctree: compiler.Tree = importer.importTree(tree.asInstanceOf[Tree]) + compiler.runExpr(ctree) } - - def runExpr(tree: rm.Tree): Any = runExpr(tree, WildcardType.asInstanceOf[rm.Type]) } } diff --git a/test/files/neg/reify_ann2a.scala b/test/files/neg/reify_ann2a.scala index 071919eb76..8de0984074 100644 --- a/test/files/neg/reify_ann2a.scala +++ b/test/files/neg/reify_ann2a.scala @@ -26,5 +26,5 @@ object Test extends App { println(ttree.toString) // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/files/neg/reify_ann2b.scala b/test/files/neg/reify_ann2b.scala index 
74273ad6ec..b43567c2a7 100644 --- a/test/files/neg/reify_ann2b.scala +++ b/test/files/neg/reify_ann2b.scala @@ -26,5 +26,5 @@ object Test extends App { println(ttree.toString) // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/files/run/reify_ann1a.scala b/test/files/run/reify_ann1a.scala index 933ea21b20..1ca170904b 100644 --- a/test/files/run/reify_ann1a.scala +++ b/test/files/run/reify_ann1a.scala @@ -26,5 +26,5 @@ object Test extends App { println(ttree.toString) // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/files/run/reify_ann1b.scala b/test/files/run/reify_ann1b.scala index 53dfe08086..9bdc712227 100644 --- a/test/files/run/reify_ann1b.scala +++ b/test/files/run/reify_ann1b.scala @@ -26,5 +26,5 @@ object Test extends App { println(ttree.toString) // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/files/run/reify_anonymous.scala b/test/files/run/reify_anonymous.scala index 1e7f3fe856..af16f2f8fd 100644 --- a/test/files/run/reify_anonymous.scala +++ b/test/files/run/reify_anonymous.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_classfileann_a.scala b/test/files/run/reify_classfileann_a.scala index 6bf4750bbc..c77bd3b8a2 100644 --- a/test/files/run/reify_classfileann_a.scala +++ b/test/files/run/reify_classfileann_a.scala @@ -20,5 +20,5 @@ object Test extends App { println(ttree.toString) // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/files/run/reify_closure1.scala b/test/files/run/reify_closure1.scala index 960f6aec3e..7cb3aff17d 100644 --- a/test/files/run/reify_closure1.scala +++ b/test/files/run/reify_closure1.scala @@ -10,8 +10,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure2a.scala b/test/files/run/reify_closure2a.scala index 6c28514c2b..cf367aa63f 100644 --- a/test/files/run/reify_closure2a.scala +++ b/test/files/run/reify_closure2a.scala @@ -10,8 +10,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure3a.scala b/test/files/run/reify_closure3a.scala index 4444c55ddf..d322b970b6 100644 --- a/test/files/run/reify_closure3a.scala +++ b/test/files/run/reify_closure3a.scala @@ -12,8 +12,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure4a.scala b/test/files/run/reify_closure4a.scala index 886e643a47..bbedd7e092 100644 --- a/test/files/run/reify_closure4a.scala +++ b/test/files/run/reify_closure4a.scala @@ -12,8 +12,7 
@@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure5a.scala b/test/files/run/reify_closure5a.scala index 20994abff0..193e18103a 100644 --- a/test/files/run/reify_closure5a.scala +++ b/test/files/run/reify_closure5a.scala @@ -10,8 +10,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure6.scala b/test/files/run/reify_closure6.scala index 192c08f701..6aff83cb94 100644 --- a/test/files/run/reify_closure6.scala +++ b/test/files/run/reify_closure6.scala @@ -17,8 +17,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure7.scala b/test/files/run/reify_closure7.scala index 942c2cda9c..46002d8d6c 100644 --- a/test/files/run/reify_closure7.scala +++ b/test/files/run/reify_closure7.scala @@ -19,8 +19,7 @@ object Test extends App { if (clo == null) { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun.tree) clo = dyn.asInstanceOf[Int => Int] } diff --git a/test/files/run/reify_closure8a.scala b/test/files/run/reify_closure8a.scala index 5e54bfc8c7..805d8ff855 100644 --- a/test/files/run/reify_closure8a.scala +++ b/test/files/run/reify_closure8a.scala @@ -10,8 +10,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(10).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(10).fun.tree) val foo = dyn.asInstanceOf[Int] println(foo) } diff --git a/test/files/run/reify_closures10.scala b/test/files/run/reify_closures10.scala index d0f895ae4d..b6ec8e8911 100644 --- a/test/files/run/reify_closures10.scala +++ b/test/files/run/reify_closures10.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - println(toolbox.runExpr(ttree)) + println(toolbox.runExpr(code.tree)) } diff --git a/test/files/run/reify_complex.scala b/test/files/run/reify_complex.scala index aae4d558cf..0d9aeb28c5 100644 --- a/test/files/run/reify_complex.scala +++ b/test/files/run/reify_complex.scala @@ -26,6 +26,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_extendbuiltins.scala b/test/files/run/reify_extendbuiltins.scala index 57acd699ff..0aaec7cdf2 100644 --- a/test/files/run/reify_extendbuiltins.scala +++ b/test/files/run/reify_extendbuiltins.scala @@ -16,6 +16,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = 
toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_for1.scala b/test/files/run/reify_for1.scala index 4b03330293..d1b60d878b 100644 --- a/test/files/run/reify_for1.scala +++ b/test/files/run/reify_for1.scala @@ -11,6 +11,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_fors.check b/test/files/run/reify_fors.check new file mode 100644 index 0000000000..eefddedc20 --- /dev/null +++ b/test/files/run/reify_fors.check @@ -0,0 +1,5 @@ +Persons over 20: John Richard +divisors(34) = List(1, 2, 17, 34) +findNums(15) = (4,1) (5,2) (6,1) (7,4) (8,3) (8,5) (9,2) (9,4) (10,1) (10,3) (10,7) (11,2) (11,6) (11,8) (12,1) (12,5) (12,7) (13,4) (13,6) (13,10) (14,3) (14,5) (14,9) +average(List(3.5, 5.0, 4.5)) = 4.333333333333333 +scalProd(List(3.5, 5.0, 4.5), List(2.0, 1.0, 3.0)) = 25.5 diff --git a/test/files/run/reify_fors.scala b/test/files/run/reify_fors.scala new file mode 100644 index 0000000000..27ee85d18b --- /dev/null +++ b/test/files/run/reify_fors.scala @@ -0,0 +1,106 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object Persons { + /** A list of persons. To create a list, we use Predef.List + * which takes a variable number of arguments and constructs + * a list out of them. + */ + val persons = List( + new Person("Bob", 17), + new Person("John", 40), + new Person("Richard", 68) + ) + + /** A Person class. 'val' constructor parameters become + * public members of the class. + */ + class Person(val name: String, val age: Int) + + /** Return an iterator over persons that are older than 20. + */ + def olderThan20(xs: Seq[Person]): Iterator[String] = + olderThan20(xs.iterator) + + /** Return an iterator over persons older than 20, given + * an iterator over persons. + */ + def olderThan20(xs: Iterator[Person]): Iterator[String] = { + + // The first expression is called a 'generator' and makes + // 'p' take values from 'xs'. The second expression is + // called a 'filter' and it is a boolean expression which + // selects only persons older than 20. There can be more than + // one generator and filter. The 'yield' expression is evaluated + // for each 'p' which satisfies the filters and used to assemble + // the resulting iterator + for (p <- xs if p.age > 20) yield p.name + } + } + + /** Some functions over lists of numbers which demonstrate + * the use of for comprehensions. + */ + object Numeric { + + /** Return the divisors of n. */ + def divisors(n: Int): List[Int] = + for (i <- List.range(1, n+1) if n % i == 0) yield i + + /** Is 'n' a prime number? */ + def isPrime(n: Int) = divisors(n).length == 2 + + /** Return pairs of numbers whose sum is prime. */ + def findNums(n: Int): Iterable[(Int, Int)] = { + + // a for comprehension using two generators + for (i <- 1 until n; + j <- 1 until (i-1); + if isPrime(i + j)) yield (i, j) + } + + /** Return the sum of the elements of 'xs'. */ + def sum(xs: List[Double]): Double = + xs.foldLeft(0.0) { (x, y) => x + y } + + /** Return the sum of pairwise product of the two lists. */ + def scalProd(xs: List[Double], ys: List[Double]) = + sum(for((x, y) <- xs zip ys) yield x * y); + + /** Remove duplicate elements in 'xs'. 
*/ + def removeDuplicates[A](xs: List[A]): List[A] = + if (xs.isEmpty) + xs + else + xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x) + } + + // import all members of object 'persons' in the current scope + import Persons._ + + print("Persons over 20:") + olderThan20(persons) foreach { x => print(" " + x) } + println + + import Numeric._ + + println("divisors(34) = " + divisors(34)) + + print("findNums(15) =") + findNums(15) foreach { x => print(" " + x) } + println + + val xs = List(3.5, 5.0, 4.5) + println("average(" + xs + ") = " + sum(xs) / xs.length) + + val ys = List(2.0, 1.0, 3.0) + println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys)) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git a/test/files/run/reify_generic.scala b/test/files/run/reify_generic.scala index aef038b2d8..6a4ff148c4 100644 --- a/test/files/run/reify_generic.scala +++ b/test/files/run/reify_generic.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_generic2.scala b/test/files/run/reify_generic2.scala index d03fe7602b..9413f41eb5 100644 --- a/test/files/run/reify_generic2.scala +++ b/test/files/run/reify_generic2.scala @@ -11,6 +11,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_getter.scala b/test/files/run/reify_getter.scala index 83eaded506..33f36888a7 100644 --- a/test/files/run/reify_getter.scala +++ b/test/files/run/reify_getter.scala @@ -13,7 +13,6 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(code.tree) println("evaluated = " + evaluated) } diff --git a/test/files/run/reify_implicits.scala b/test/files/run/reify_implicits.scala index a15cef9c97..953eabe6c2 100644 --- a/test/files/run/reify_implicits.scala +++ b/test/files/run/reify_implicits.scala @@ -16,6 +16,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_inheritance.scala b/test/files/run/reify_inheritance.scala index 2a1b5f764f..78a64c264e 100644 --- a/test/files/run/reify_inheritance.scala +++ b/test/files/run/reify_inheritance.scala @@ -18,6 +18,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_inner1.scala b/test/files/run/reify_inner1.scala index 69931198e0..546fe36d16 100644 --- a/test/files/run/reify_inner1.scala +++ b/test/files/run/reify_inner1.scala @@ -17,6 +17,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_inner2.scala 
b/test/files/run/reify_inner2.scala index 0f12fd472a..613614b989 100644 --- a/test/files/run/reify_inner2.scala +++ b/test/files/run/reify_inner2.scala @@ -17,6 +17,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_inner3.scala b/test/files/run/reify_inner3.scala index 6b97b42b34..e9fb636dce 100644 --- a/test/files/run/reify_inner3.scala +++ b/test/files/run/reify_inner3.scala @@ -17,6 +17,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_inner4.scala b/test/files/run/reify_inner4.scala index de8c973b09..33870b0983 100644 --- a/test/files/run/reify_inner4.scala +++ b/test/files/run/reify_inner4.scala @@ -17,6 +17,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_maps.check b/test/files/run/reify_maps.check new file mode 100644 index 0000000000..08cbbb1359 --- /dev/null +++ b/test/files/run/reify_maps.check @@ -0,0 +1,4 @@ +red has code: 16711680 +Unknown color: green +Unknown color: blue +turquoise has code: 65535 diff --git a/test/files/run/reify_maps.scala b/test/files/run/reify_maps.scala new file mode 100644 index 0000000000..d3d95ffa24 --- /dev/null +++ b/test/files/run/reify_maps.scala @@ -0,0 +1,25 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + val colors = Map("red" -> 0xFF0000, + "turquoise" -> 0x00FFFF, + "black" -> 0x000000, + "orange" -> 0xFF8040, + "brown" -> 0x804000) + for (name <- List("red", "green", "blue", "turquoise")) println( + colors.get(name) match { + case Some(code) => + name + " has code: " + code + case None => + "Unknown color: " + name + } + ) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git a/test/files/run/reify_printf.scala b/test/files/run/reify_printf.scala index 30901b98c2..cd6052bc5e 100644 --- a/test/files/run/reify_printf.scala +++ b/test/files/run/reify_printf.scala @@ -14,11 +14,10 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter, args mkString " ") - val ttree = toolbox.typeCheck(tree) val output = new ByteArrayOutputStream() Console.setOut(new PrintStream(output)) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(tree) assert(output.toString() == "hello world", output.toString() +" == hello world") diff --git a/test/files/run/reify_sort.scala b/test/files/run/reify_sort.scala index 42991fe5d2..5984a64967 100644 --- a/test/files/run/reify_sort.scala +++ b/test/files/run/reify_sort.scala @@ -52,6 +52,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_sort1.scala b/test/files/run/reify_sort1.scala index 42f4c824a5..6f365dea26 100644 --- 
a/test/files/run/reify_sort1.scala +++ b/test/files/run/reify_sort1.scala @@ -22,6 +22,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/reify_this.scala b/test/files/run/reify_this.scala index 44a25ae1b6..ee1f116013 100644 --- a/test/files/run/reify_this.scala +++ b/test/files/run/reify_this.scala @@ -11,8 +11,7 @@ trait Eval { val settings = new Settings val reporter = new ConsoleReporter(settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(tree) - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } } diff --git a/test/files/run/reify_timeofday.check b/test/files/run/reify_timeofday.check new file mode 100644 index 0000000000..3fd3b76a62 --- /dev/null +++ b/test/files/run/reify_timeofday.check @@ -0,0 +1 @@ +DateError diff --git a/test/files/run/reify_timeofday.scala b/test/files/run/reify_timeofday.scala new file mode 100644 index 0000000000..122d7a6d52 --- /dev/null +++ b/test/files/run/reify_timeofday.scala @@ -0,0 +1,47 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + class DateError extends Exception + + /** Simulating properties in Scala + * (example 4.2.1 in ScalaReference.pdf) + */ + class TimeOfDayVar { + private var h, m, s: Int = 0 + + def hours = h + + /** A method 'ident_=' is a setter for 'ident'. 'code.ident = ...' will + * be translated to a call to 'ident_=' + */ + def hours_= (h: Int) = + if (0 <= h && h < 24) this.h = h + else throw new DateError() + + def minutes = m + def minutes_= (m: Int) = + if (0 <= m && m < 60) this.m = m + else throw new DateError() + + def seconds = s + def seconds_= (s: Int) = + if (0 <= s && s < 60) this.s = s + else throw new DateError() + } + + val d = new TimeOfDayVar + d.hours = 8; d.minutes = 30; d.seconds = 0 + try { d.hours = 25 // throws a DateError exception + } catch { + case de: DateError => println("DateError") + case e: Exception => println("Exception") + } + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git a/test/files/run/reify_varargs.scala b/test/files/run/reify_varargs.scala index d38cbf2aac..175cfb5db0 100644 --- a/test/files/run/reify_varargs.scala +++ b/test/files/run/reify_varargs.scala @@ -12,6 +12,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5229_1.scala b/test/files/run/t5229_1.scala index 1d7bf0590b..d5af569656 100644 --- a/test/files/run/t5229_1.scala +++ b/test/files/run/t5229_1.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5229_2.check b/test/files/run/t5229_2.check index 5db6ec9b38..43c25b96af 100644 --- a/test/files/run/t5229_2.check +++ b/test/files/run/t5229_2.check @@ -1,2 +1,2 @@ 2 -evaluated = null +evaluated = () diff --git a/test/files/run/t5229_2.scala b/test/files/run/t5229_2.scala index 67be7328a6..07f9ac6b84 100644 --- a/test/files/run/t5229_2.scala +++ 
b/test/files/run/t5229_2.scala @@ -13,7 +13,6 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(code.tree) println("evaluated = " + evaluated) } diff --git a/test/files/run/t5230.check b/test/files/run/t5230.check index 5db6ec9b38..43c25b96af 100644 --- a/test/files/run/t5230.check +++ b/test/files/run/t5230.check @@ -1,2 +1,2 @@ 2 -evaluated = null +evaluated = () diff --git a/test/files/run/t5230.scala b/test/files/run/t5230.scala index 5aab8f9290..d3106ca05c 100644 --- a/test/files/run/t5230.scala +++ b/test/files/run/t5230.scala @@ -13,7 +13,6 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(code.tree) println("evaluated = " + evaluated) } diff --git a/test/files/run/t5258a.scala b/test/files/run/t5258a.scala index deabb8310f..8cc4249e06 100644 --- a/test/files/run/t5258a.scala +++ b/test/files/run/t5258a.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } \ No newline at end of file diff --git a/test/files/run/t5266_1.check b/test/files/run/t5266_1.check index 3feac16a0b..35f20802ee 100644 --- a/test/files/run/t5266_1.check +++ b/test/files/run/t5266_1.check @@ -1,2 +1,2 @@ 2 -evaluated = null \ No newline at end of file +evaluated = () \ No newline at end of file diff --git a/test/files/run/t5266_1.scala b/test/files/run/t5266_1.scala index 18e288e685..4262bc7a7b 100644 --- a/test/files/run/t5266_1.scala +++ b/test/files/run/t5266_1.scala @@ -10,7 +10,6 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(code.tree) println("evaluated = " + evaluated) } \ No newline at end of file diff --git a/test/files/run/t5266_2.check b/test/files/run/t5266_2.check index 3feac16a0b..35f20802ee 100644 --- a/test/files/run/t5266_2.check +++ b/test/files/run/t5266_2.check @@ -1,2 +1,2 @@ 2 -evaluated = null \ No newline at end of file +evaluated = () \ No newline at end of file diff --git a/test/files/run/t5266_2.scala b/test/files/run/t5266_2.scala index eb319583f8..d0f718dbd7 100644 --- a/test/files/run/t5266_2.scala +++ b/test/files/run/t5266_2.scala @@ -11,7 +11,6 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - val evaluated = toolbox.runExpr(ttree) + val evaluated = toolbox.runExpr(code.tree) println("evaluated = " + evaluated) } diff --git a/test/files/run/t5269.scala b/test/files/run/t5269.scala index a30509f3fe..cab99f17e6 100644 --- a/test/files/run/t5269.scala +++ b/test/files/run/t5269.scala @@ -17,6 +17,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5270.scala b/test/files/run/t5270.scala index 10f79790b0..934cc13dea 100644 --- a/test/files/run/t5270.scala +++ 
b/test/files/run/t5270.scala @@ -21,6 +21,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5271_1.check b/test/files/run/t5271_1.check index e69de29bb2..d4fd544e88 100644 --- a/test/files/run/t5271_1.check +++ b/test/files/run/t5271_1.check @@ -0,0 +1,11 @@ +{ + case class C extends Object with ScalaObject with Product with Serializable { + val foo : Int = _; + val bar : Int = _; + def (foo: Int, bar: Int) = { + super.(); + () + } + }; + () +} diff --git a/test/files/run/t5271_1.scala b/test/files/run/t5271_1.scala index 5f10e64528..fbc57aead7 100644 --- a/test/files/run/t5271_1.scala +++ b/test/files/run/t5271_1.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + println(code.tree) } diff --git a/test/files/run/t5271_2.check b/test/files/run/t5271_2.check index b8626c4cff..5a519f265f 100644 --- a/test/files/run/t5271_2.check +++ b/test/files/run/t5271_2.check @@ -1 +1,12 @@ -4 +{ + case class C extends Object with ScalaObject with Product with Serializable { + val foo : Int = _; + val bar : Int = _; + def (foo: Int, bar: Int) = { + super.(); + () + } + }; + val c = C.apply(2, 2); + scala.this.Predef.println(c.foo.$times(c.bar)) +} diff --git a/test/files/run/t5271_2.scala b/test/files/run/t5271_2.scala index 71967c04ed..4bfc574e00 100644 --- a/test/files/run/t5271_2.scala +++ b/test/files/run/t5271_2.scala @@ -11,6 +11,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + println(code.tree) } diff --git a/test/files/run/t5271_3.check b/test/files/run/t5271_3.check index f32a5804e2..be87696f02 100644 --- a/test/files/run/t5271_3.check +++ b/test/files/run/t5271_3.check @@ -1 +1,19 @@ -true \ No newline at end of file +{ + object C extends Object with ScalaObject with Serializable { + def () = { + super.(); + () + }; + def qwe: Int = 4 + }; + case class C extends Object with ScalaObject with Product with Serializable { + val foo : Int = _; + val bar : Int = _; + def (foo: Int, bar: Int) = { + super.(); + () + } + }; + val c = C.apply(2, 2); + scala.this.Predef.println(c.foo.$times(c.bar).$eq$eq(C.qwe)) +} diff --git a/test/files/run/t5271_3.scala b/test/files/run/t5271_3.scala index bfa116c691..a085bdca4c 100644 --- a/test/files/run/t5271_3.scala +++ b/test/files/run/t5271_3.scala @@ -12,6 +12,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + println(code.tree) } diff --git a/test/files/run/t5271_4.scala b/test/files/run/t5271_4.scala index e5e16033e8..c253b1adca 100644 --- a/test/files/run/t5271_4.scala +++ b/test/files/run/t5271_4.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5272_1.scala b/test/files/run/t5272_1.scala index 3f44d05fb3..882287f033 100644 --- a/test/files/run/t5272_1.scala +++ b/test/files/run/t5272_1.scala @@ -12,6 +12,5 @@ object Test extends App { val 
reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5272_2.scala b/test/files/run/t5272_2.scala index 833ee65285..48b6a670bb 100644 --- a/test/files/run/t5272_2.scala +++ b/test/files/run/t5272_2.scala @@ -11,6 +11,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5273_1.scala b/test/files/run/t5273_1.scala index 1175881c9f..80460a4ae6 100644 --- a/test/files/run/t5273_1.scala +++ b/test/files/run/t5273_1.scala @@ -12,6 +12,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5273_2a.scala b/test/files/run/t5273_2a.scala index 12ddbb280a..a7a336d8a7 100644 --- a/test/files/run/t5273_2a.scala +++ b/test/files/run/t5273_2a.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5273_2b.scala b/test/files/run/t5273_2b.scala index 8b75084463..85c40f0607 100644 --- a/test/files/run/t5273_2b.scala +++ b/test/files/run/t5273_2b.scala @@ -11,6 +11,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5274_1.scala b/test/files/run/t5274_1.scala index c501172518..74a5b81bcb 100644 --- a/test/files/run/t5274_1.scala +++ b/test/files/run/t5274_1.scala @@ -15,6 +15,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5274_2.scala b/test/files/run/t5274_2.scala index 42991fe5d2..5984a64967 100644 --- a/test/files/run/t5274_2.scala +++ b/test/files/run/t5274_2.scala @@ -52,6 +52,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5275.scala b/test/files/run/t5275.scala index d419834ded..285d8a18a4 100644 --- a/test/files/run/t5275.scala +++ b/test/files/run/t5275.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5276_1a.scala b/test/files/run/t5276_1a.scala index c8afbba19e..b717675824 100644 --- a/test/files/run/t5276_1a.scala +++ b/test/files/run/t5276_1a.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5276_1b.scala b/test/files/run/t5276_1b.scala index 31582201fb..1ff25504ca 100644 --- 
a/test/files/run/t5276_1b.scala +++ b/test/files/run/t5276_1b.scala @@ -5,11 +5,10 @@ import reflect.runtime.Mirror.ToolBox object Test extends App { val code = scala.reflect.Code.lift{ implicit lazy val x = 2 - implicitly[Int] + println(implicitly[Int]) }; val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5276_2a.scala b/test/files/run/t5276_2a.scala index 179c14b739..af5ff2a565 100644 --- a/test/files/run/t5276_2a.scala +++ b/test/files/run/t5276_2a.scala @@ -13,6 +13,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5276_2b.scala b/test/files/run/t5276_2b.scala index 6fe2873fef..63904b2898 100644 --- a/test/files/run/t5276_2b.scala +++ b/test/files/run/t5276_2b.scala @@ -14,6 +14,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5277_1.scala b/test/files/run/t5277_1.scala index 57acd699ff..0aaec7cdf2 100644 --- a/test/files/run/t5277_1.scala +++ b/test/files/run/t5277_1.scala @@ -16,6 +16,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5277_2.scala b/test/files/run/t5277_2.scala index 67b6b000bc..91ed55122a 100644 --- a/test/files/run/t5277_2.scala +++ b/test/files/run/t5277_2.scala @@ -13,6 +13,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5279.scala b/test/files/run/t5279.scala index 39e7dd2c66..cef58535d5 100644 --- a/test/files/run/t5279.scala +++ b/test/files/run/t5279.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/files/run/t5334_1.check b/test/files/run/t5334_1.check index e09aedaede..96d80cd6c4 100644 --- a/test/files/run/t5334_1.check +++ b/test/files/run/t5334_1.check @@ -1,2 +1 @@ -C C \ No newline at end of file diff --git a/test/files/run/t5334_1.scala b/test/files/run/t5334_1.scala index 7acf282bb8..9887bebf78 100644 --- a/test/files/run/t5334_1.scala +++ b/test/files/run/t5334_1.scala @@ -10,7 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - println(ttree.tpe) - println(toolbox.runExpr(ttree)) + println(toolbox.runExpr(code.tree)) } diff --git a/test/files/run/t5334_2.check b/test/files/run/t5334_2.check index 2ae76754c0..613d286a18 100644 --- a/test/files/run/t5334_2.check +++ b/test/files/run/t5334_2.check @@ -1,2 +1 @@ -List[(C, C)] List((C,C)) \ No newline at end of file diff --git a/test/files/run/t5334_2.scala b/test/files/run/t5334_2.scala index 26f0778400..775a05aaf7 100644 --- a/test/files/run/t5334_2.scala +++ 
b/test/files/run/t5334_2.scala @@ -10,7 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - println(ttree.tpe) - println(toolbox.runExpr(ttree)) + println(toolbox.runExpr(code.tree)) } diff --git a/test/files/run/t5335.scala b/test/files/run/t5335.scala index 9a8b91f04d..8e2ed59db6 100644 --- a/test/files/run/t5335.scala +++ b/test/files/run/t5335.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_addressbook.scala b/test/pending/run/reify_addressbook.scala index 225f26b75e..54dd5545bd 100644 --- a/test/pending/run/reify_addressbook.scala +++ b/test/pending/run/reify_addressbook.scala @@ -66,6 +66,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_brainf_ck.scala b/test/pending/run/reify_brainf_ck.scala index 3bfb76c9ea..0034644b81 100644 --- a/test/pending/run/reify_brainf_ck.scala +++ b/test/pending/run/reify_brainf_ck.scala @@ -80,6 +80,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_callccinterpreter.scala b/test/pending/run/reify_callccinterpreter.scala index c10f4f0b4e..96ae9c5c17 100644 --- a/test/pending/run/reify_callccinterpreter.scala +++ b/test/pending/run/reify_callccinterpreter.scala @@ -89,6 +89,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_classfileann_b.scala b/test/pending/run/reify_classfileann_b.scala index b76dd8fc9f..c31826377a 100644 --- a/test/pending/run/reify_classfileann_b.scala +++ b/test/pending/run/reify_classfileann_b.scala @@ -17,12 +17,8 @@ object Test extends App { }.tree println(tree.toString) - // test 2: import and typecheck + // test 2: import and compile val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(tree) - println(ttree.toString) - - // test 3: import and compile - toolbox.runExpr(ttree) + toolbox.runExpr(tree) } \ No newline at end of file diff --git a/test/pending/run/reify_closure2b.scala b/test/pending/run/reify_closure2b.scala index a1fead07ae..b9c0063290 100644 --- a/test/pending/run/reify_closure2b.scala +++ b/test/pending/run/reify_closure2b.scala @@ -12,8 +12,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(y).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(y).fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/pending/run/reify_closure3b.scala b/test/pending/run/reify_closure3b.scala index acf07c4749..8f161dbff3 100644 --- a/test/pending/run/reify_closure3b.scala +++ b/test/pending/run/reify_closure3b.scala @@ -14,8 +14,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val 
toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(y).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(y).fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/pending/run/reify_closure4b.scala b/test/pending/run/reify_closure4b.scala index ed102298c5..238795d4dd 100644 --- a/test/pending/run/reify_closure4b.scala +++ b/test/pending/run/reify_closure4b.scala @@ -14,8 +14,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(y).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(y).fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/pending/run/reify_closure5b.scala b/test/pending/run/reify_closure5b.scala index 29e911538f..bdb2583e8a 100644 --- a/test/pending/run/reify_closure5b.scala +++ b/test/pending/run/reify_closure5b.scala @@ -12,8 +12,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(ys).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(ys).fun.tree) dyn.asInstanceOf[Int => Int] } diff --git a/test/pending/run/reify_closure8b.scala b/test/pending/run/reify_closure8b.scala index 9e37e4e09a..38031c217b 100644 --- a/test/pending/run/reify_closure8b.scala +++ b/test/pending/run/reify_closure8b.scala @@ -10,8 +10,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(10).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(10).fun.tree) val foo = dyn.asInstanceOf[Int] println(foo) } diff --git a/test/pending/run/reify_closure9a.scala b/test/pending/run/reify_closure9a.scala index f3ee153d3c..185f4ffca1 100644 --- a/test/pending/run/reify_closure9a.scala +++ b/test/pending/run/reify_closure9a.scala @@ -11,8 +11,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(y).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(y).fun.tree) dyn.asInstanceOf[Int] } diff --git a/test/pending/run/reify_closure9b.scala b/test/pending/run/reify_closure9b.scala index 8d349e8701..ad279fac6d 100644 --- a/test/pending/run/reify_closure9b.scala +++ b/test/pending/run/reify_closure9b.scala @@ -11,8 +11,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(new Foo(y).fun.tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(new Foo(y).fun.tree) dyn.asInstanceOf[Int] } diff --git a/test/pending/run/reify_closures11.scala b/test/pending/run/reify_closures11.scala index 42053bd029..2c4177b8f2 100644 --- a/test/pending/run/reify_closures11.scala +++ b/test/pending/run/reify_closures11.scala @@ -11,8 +11,7 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(fun().tree) - val dyn = toolbox.runExpr(ttree) + val dyn = toolbox.runExpr(fun().tree) val foo = dyn.asInstanceOf[Int] println(foo) } diff --git a/test/pending/run/reify_csv.scala b/test/pending/run/reify_csv.scala index a05a3b55d4..a6a616fab0 100644 --- a/test/pending/run/reify_csv.scala +++ b/test/pending/run/reify_csv.scala @@ -37,6 +37,5 @@ object Test extends App { val 
reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_fors.check b/test/pending/run/reify_fors.check deleted file mode 100644 index eefddedc20..0000000000 --- a/test/pending/run/reify_fors.check +++ /dev/null @@ -1,5 +0,0 @@ -Persons over 20: John Richard -divisors(34) = List(1, 2, 17, 34) -findNums(15) = (4,1) (5,2) (6,1) (7,4) (8,3) (8,5) (9,2) (9,4) (10,1) (10,3) (10,7) (11,2) (11,6) (11,8) (12,1) (12,5) (12,7) (13,4) (13,6) (13,10) (14,3) (14,5) (14,9) -average(List(3.5, 5.0, 4.5)) = 4.333333333333333 -scalProd(List(3.5, 5.0, 4.5), List(2.0, 1.0, 3.0)) = 25.5 diff --git a/test/pending/run/reify_fors.scala b/test/pending/run/reify_fors.scala deleted file mode 100644 index f3556514a9..0000000000 --- a/test/pending/run/reify_fors.scala +++ /dev/null @@ -1,107 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - object Persons { - /** A list of persons. To create a list, we use Predef.List - * which takes a variable number of arguments and constructs - * a list out of them. - */ - val persons = List( - new Person("Bob", 17), - new Person("John", 40), - new Person("Richard", 68) - ) - - /** A Person class. 'val' constructor parameters become - * public members of the class. - */ - class Person(val name: String, val age: Int) - - /** Return an iterator over persons that are older than 20. - */ - def olderThan20(xs: Seq[Person]): Iterator[String] = - olderThan20(xs.elements) - - /** Return an iterator over persons older than 20, given - * an iterator over persons. - */ - def olderThan20(xs: Iterator[Person]): Iterator[String] = { - - // The first expression is called a 'generator' and makes - // 'p' take values from 'xs'. The second expression is - // called a 'filter' and it is a boolean expression which - // selects only persons older than 20. There can be more than - // one generator and filter. The 'yield' expression is evaluated - // for each 'p' which satisfies the filters and used to assemble - // the resulting iterator - for (p <- xs if p.age > 20) yield p.name - } - } - - /** Some functions over lists of numbers which demonstrate - * the use of for comprehensions. - */ - object Numeric { - - /** Return the divisors of n. */ - def divisors(n: Int): List[Int] = - for (i <- List.range(1, n+1) if n % i == 0) yield i - - /** Is 'n' a prime number? */ - def isPrime(n: Int) = divisors(n).length == 2 - - /** Return pairs of numbers whose sum is prime. */ - def findNums(n: Int): Iterable[(Int, Int)] = { - - // a for comprehension using two generators - for (i <- 1 until n; - j <- 1 until (i-1); - if isPrime(i + j)) yield (i, j) - } - - /** Return the sum of the elements of 'xs'. */ - def sum(xs: List[Double]): Double = - xs.foldLeft(0.0) { (x, y) => x + y } - - /** Return the sum of pairwise product of the two lists. */ - def scalProd(xs: List[Double], ys: List[Double]) = - sum(for((x, y) <- xs zip ys) yield x * y); - - /** Remove duplicate elements in 'xs'. 
*/ - def removeDuplicates[A](xs: List[A]): List[A] = - if (xs.isEmpty) - xs - else - xs.head :: removeDuplicates(for (x <- xs.tail if x != xs.head) yield x) - } - - // import all members of object 'persons' in the current scope - import Persons._ - - print("Persons over 20:") - olderThan20(persons) foreach { x => print(" " + x) } - println - - import Numeric._ - - println("divisors(34) = " + divisors(34)) - - print("findNums(15) =") - findNums(15) foreach { x => print(" " + x) } - println - - val xs = List(3.5, 5.0, 4.5) - println("average(" + xs + ") = " + sum(xs) / xs.length) - - val ys = List(2.0, 1.0, 3.0) - println("scalProd(" + xs + ", " + ys +") = " + scalProd(xs, ys)) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_gadts.scala b/test/pending/run/reify_gadts.scala index 7077de735c..9feb7a5726 100644 --- a/test/pending/run/reify_gadts.scala +++ b/test/pending/run/reify_gadts.scala @@ -40,6 +40,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_lazyevaluation.scala b/test/pending/run/reify_lazyevaluation.scala index f38af76751..0720a7c979 100644 --- a/test/pending/run/reify_lazyevaluation.scala +++ b/test/pending/run/reify_lazyevaluation.scala @@ -60,6 +60,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_maps.check b/test/pending/run/reify_maps.check deleted file mode 100644 index 08cbbb1359..0000000000 --- a/test/pending/run/reify_maps.check +++ /dev/null @@ -1,4 +0,0 @@ -red has code: 16711680 -Unknown color: green -Unknown color: blue -turquoise has code: 65535 diff --git a/test/pending/run/reify_maps.scala b/test/pending/run/reify_maps.scala deleted file mode 100644 index 589b28d049..0000000000 --- a/test/pending/run/reify_maps.scala +++ /dev/null @@ -1,26 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - val colors = Map("red" -> 0xFF0000, - "turquoise" -> 0x00FFFF, - "black" -> 0x000000, - "orange" -> 0xFF8040, - "brown" -> 0x804000) - for (name <- List("red", "green", "blue", "turquoise")) println( - colors.get(name) match { - case Some(code) => - name + " has code: " + code - case None => - "Unknown color: " + name - } - ) - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/reify_properties.scala b/test/pending/run/reify_properties.scala index 2115a96715..265c344b8e 100644 --- a/test/pending/run/reify_properties.scala +++ b/test/pending/run/reify_properties.scala @@ -58,6 +58,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_simpleinterpreter.scala b/test/pending/run/reify_simpleinterpreter.scala index b39f5583bb..4762afb3cc 100644 --- 
a/test/pending/run/reify_simpleinterpreter.scala +++ b/test/pending/run/reify_simpleinterpreter.scala @@ -77,6 +77,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } diff --git a/test/pending/run/reify_timeofday.check b/test/pending/run/reify_timeofday.check deleted file mode 100644 index 3fd3b76a62..0000000000 --- a/test/pending/run/reify_timeofday.check +++ /dev/null @@ -1 +0,0 @@ -DateError diff --git a/test/pending/run/reify_timeofday.scala b/test/pending/run/reify_timeofday.scala deleted file mode 100644 index 6bd11b0d30..0000000000 --- a/test/pending/run/reify_timeofday.scala +++ /dev/null @@ -1,48 +0,0 @@ -import scala.tools.nsc.reporters._ -import scala.tools.nsc.Settings -import reflect.runtime.Mirror.ToolBox - -object Test extends App { - val code = scala.reflect.Code.lift{ - class DateError extends Exception - - /** Simulating properties in Scala - * (example 4.2.1 in ScalaReference.pdf) - */ - class TimeOfDayVar { - private var h, m, s: Int = 0 - - def hours = h - - /** A method 'ident_=' is a setter for 'ident'. 'code.ident = ...' will - * be translated to a call to 'ident_=' - */ - def hours_= (h: Int) = - if (0 <= h && h < 24) this.h = h - else throw new DateError() - - def minutes = m - def minutes_= (m: Int) = - if (0 <= m && m < 60) this.m = m - else throw new DateError() - - def seconds = s - def seconds_= (s: Int) = - if (0 <= s && s < 60) this.s = s - else throw new DateError() - } - - val d = new TimeOfDayVar - d.hours = 8; d.minutes = 30; d.seconds = 0 - try { d.hours = 25 // throws a DateError exception - } catch { - case de: DateError => println("DateError") - case e: Exception => println("Exception") - } - }; - - val reporter = new ConsoleReporter(new Settings) - val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) -} diff --git a/test/pending/run/t5258b.scala b/test/pending/run/t5258b.scala index 70cb4a7f4e..3a603095b3 100644 --- a/test/pending/run/t5258b.scala +++ b/test/pending/run/t5258b.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } \ No newline at end of file diff --git a/test/pending/run/t5258c.scala b/test/pending/run/t5258c.scala index a93170d0d6..b0d16ba0b1 100644 --- a/test/pending/run/t5258c.scala +++ b/test/pending/run/t5258c.scala @@ -10,6 +10,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } \ No newline at end of file diff --git a/test/pending/run/t5271_1.check b/test/pending/run/t5271_1.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/t5271_1.scala b/test/pending/run/t5271_1.scala new file mode 100644 index 0000000000..afbd8fe465 --- /dev/null +++ b/test/pending/run/t5271_1.scala @@ -0,0 +1,13 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + case class C(foo: Int, bar: Int) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git 
a/test/pending/run/t5271_2.check b/test/pending/run/t5271_2.check new file mode 100644 index 0000000000..b8626c4cff --- /dev/null +++ b/test/pending/run/t5271_2.check @@ -0,0 +1 @@ +4 diff --git a/test/pending/run/t5271_2.scala b/test/pending/run/t5271_2.scala new file mode 100644 index 0000000000..d85d945973 --- /dev/null +++ b/test/pending/run/t5271_2.scala @@ -0,0 +1,15 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + case class C(foo: Int, bar: Int) + val c = C(2, 2) + println(c.foo * c.bar) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git a/test/pending/run/t5271_3.check b/test/pending/run/t5271_3.check new file mode 100644 index 0000000000..f32a5804e2 --- /dev/null +++ b/test/pending/run/t5271_3.check @@ -0,0 +1 @@ +true \ No newline at end of file diff --git a/test/pending/run/t5271_3.scala b/test/pending/run/t5271_3.scala new file mode 100644 index 0000000000..5a624de903 --- /dev/null +++ b/test/pending/run/t5271_3.scala @@ -0,0 +1,16 @@ +import scala.tools.nsc.reporters._ +import scala.tools.nsc.Settings +import reflect.runtime.Mirror.ToolBox + +object Test extends App { + val code = scala.reflect.Code.lift{ + object C { def qwe = 4 } + case class C(foo: Int, bar: Int) + val c = C(2, 2) + println(c.foo * c.bar == C.qwe) + }; + + val reporter = new ConsoleReporter(new Settings) + val toolbox = new ToolBox(reporter) + toolbox.runExpr(code.tree) +} diff --git a/test/pending/run/t5418.scala b/test/pending/run/t5418.scala index 065710f15e..fe813cf5ae 100644 --- a/test/pending/run/t5418.scala +++ b/test/pending/run/t5418.scala @@ -9,6 +9,5 @@ object Test extends App { val reporter = new ConsoleReporter(new Settings) val toolbox = new ToolBox(reporter) - val ttree = toolbox.typeCheck(code.tree) - toolbox.runExpr(ttree) + toolbox.runExpr(code.tree) } \ No newline at end of file -- cgit v1.2.3 From 2c0d3fe60a8ee4deece3b656c8a3fe39dc619b53 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 14 Feb 2012 16:20:44 -0800 Subject: Remove stray "desired" sha1. It was not desired. --- instrumented.jar.desired.sha1 | 1 - 1 file changed, 1 deletion(-) delete mode 100644 instrumented.jar.desired.sha1 diff --git a/instrumented.jar.desired.sha1 b/instrumented.jar.desired.sha1 deleted file mode 100644 index 4d31c9e54f..0000000000 --- a/instrumented.jar.desired.sha1 +++ /dev/null @@ -1 +0,0 @@ -6785cf706a8448f8600f06b4c25d1816800422ce ?instrumented.jar -- cgit v1.2.3 From 4c48abbe5a438b5c892ee096d816770213c54ef5 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 14 Feb 2012 16:29:50 -0800 Subject: Eliminating trailing whitespace in AnyVals. 
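The reify tests above, including the new t5271 files, now all share the same driver: lift an expression, then hand its tree straight to the toolbox. A minimal sketch of that pattern, adapted from the new t5271_2.scala with explanatory comments added, looks like this:

import scala.tools.nsc.reporters._
import scala.tools.nsc.Settings
import reflect.runtime.Mirror.ToolBox

object Test extends App {
  // Lift the expression into a compile-time tree.
  val code = scala.reflect.Code.lift {
    case class C(foo: Int, bar: Int)
    val c = C(2, 2)
    println(c.foo * c.bar)
  }

  val reporter = new ConsoleReporter(new Settings)
  val toolbox  = new ToolBox(reporter)
  // The explicit toolbox.typeCheck step removed throughout these diffs is
  // no longer needed; the lifted tree is passed to runExpr directly.
  toolbox.runExpr(code.tree)
}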
--- src/compiler/scala/tools/cmd/gen/AnyVals.scala | 30 +++++++++++++------------- 1 file changed, 15 insertions(+), 15 deletions(-) diff --git a/src/compiler/scala/tools/cmd/gen/AnyVals.scala b/src/compiler/scala/tools/cmd/gen/AnyVals.scala index 8f82c997db..7c9599dc45 100644 --- a/src/compiler/scala/tools/cmd/gen/AnyVals.scala +++ b/src/compiler/scala/tools/cmd/gen/AnyVals.scala @@ -30,7 +30,7 @@ trait AnyValReps { " * @return the bitwise negation of this value\n" + " * @example {{{\n" + " * ~5 == -6\n" + - " * // in binary: ~00000101 == \n" + + " * // in binary: ~00000101 ==\n" + " * // 11111010\n" + " * }}}\n" + " */") :: ops @@ -44,9 +44,9 @@ trait AnyValReps { " * @return the bitwise OR of this value and x\n" + " * @example {{{\n" + " * (0xf0 | 0xaa) == 0xfa\n" + - " * // in binary: 11110000 \n" + - " * // | 10101010 \n" + - " * // -------- \n" + + " * // in binary: 11110000\n" + + " * // | 10101010\n" + + " * // --------\n" + " * // 11111010\n" + " * }}}\n" + " */"), @@ -54,9 +54,9 @@ trait AnyValReps { " * @return the bitwise AND of this value and x\n" + " * @example {{{\n" + " * (0xf0 & 0xaa) == 0xa0\n" + - " * // in binary: 11110000 \n" + - " * // & 10101010 \n" + - " * // -------- \n" + + " * // in binary: 11110000\n" + + " * // & 10101010\n" + + " * // --------\n" + " * // 10100000\n" + " * }}}\n" + " */"), @@ -64,9 +64,9 @@ trait AnyValReps { " * @return the bitwise XOR of this value and x\n" + " * @example {{{\n" + " * (0xf0 ^ 0xaa) == 0x5a\n" + - " * // in binary: 11110000 \n" + - " * // ^ 10101010 \n" + - " * // -------- \n" + + " * // in binary: 11110000\n" + + " * // ^ 10101010\n" + + " * // --------\n" + " * // 01011010\n" + " * }}}\n" + " */")) @@ -83,11 +83,11 @@ trait AnyValReps { Op(">>>", "/**\n" + " * @return this value bit-shifted right by the specified number of bits,\n" + - " * filling the new left bits with zeroes. \n" + + " * filling the new left bits with zeroes.\n" + " * @example {{{ 21 >>> 3 == 2 // in binary: 010101 >>> 3 == 010 }}}\n" + " * @example {{{\n" + - " * -21 >>> 3 == 536870909 \n" + - " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 == \n" + + " * -21 >>> 3 == 536870909\n" + + " * // in binary: 11111111 11111111 11111111 11101011 >>> 3 ==\n" + " * // 00011111 11111111 11111111 11111101\n" + " * }}}\n" + " */"), @@ -97,8 +97,8 @@ trait AnyValReps { " * filling in the right bits with the same value as the left-most bit of this.\n" + " * The effect of this is to retain the sign of the value.\n" + " * @example {{{\n" + - " * -21 >> 3 == -3 \n" + - " * // in binary: 11111111 11111111 11111111 11101011 >> 3 == \n" + + " * -21 >> 3 == -3\n" + + " * // in binary: 11111111 11111111 11111111 11101011 >> 3 ==\n" + " * // 11111111 11111111 11111111 11111101\n" + " * }}}\n" + " */")) -- cgit v1.2.3 From bb23d766bceccecc99280b543001bc70e16afbc9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 14 Feb 2012 22:04:30 -0800 Subject: Specialization action. The crickets at http://www.scala-lang.org/node/11901 were in unanimous agreement that I should proceed as suggested. - No arguments to @specialize gets you 10/10, not 9/10 - Fixed bugs in AnyRef specialization revealed by trying to use it - Specialized Function1 on AnyRef. - Changed AnyRef specialization to use OBJECT_TAG, not TVAR_TAG. - Deprecated SpecializableCompanion in favor of Specializable, which has the virtue of being public so it can be referenced from outside the library. 
- Cooked up mechanism to group specializable types so we don't have to repeat ourselves quite so much, and create a few groups for illustrative purposes. I'm not too serious about those names but I used up all my name-thinking-up brain for the day. - Updated genprod and friends since I had to regenerate Function1. - Put tests for a bunch of remaining specialization bugs in pending. Closes SI-4740, SI-4770, SI-5267. --- lib/scala-compiler.jar.desired.sha1 | 2 +- lib/scala-library-src.jar.desired.sha1 | 2 +- lib/scala-library.jar.desired.sha1 | 2 +- src/build/genprod.scala | 27 +++++---- .../reflect/internal/ClassfileConstants.scala | 1 + .../scala/reflect/internal/Definitions.scala | 25 ++++++--- .../tools/nsc/transform/SpecializeTypes.scala | 65 +++++++++++++--------- .../scala/tools/nsc/typechecker/Duplicators.scala | 31 +++++++---- src/library/scala/AnyValCompanion.scala | 2 +- src/library/scala/Function0.scala | 14 +++-- src/library/scala/Function1.scala | 14 ++--- src/library/scala/Function10.scala | 4 +- src/library/scala/Function11.scala | 4 +- src/library/scala/Function12.scala | 4 +- src/library/scala/Function13.scala | 4 +- src/library/scala/Function14.scala | 4 +- src/library/scala/Function15.scala | 4 +- src/library/scala/Function16.scala | 4 +- src/library/scala/Function17.scala | 4 +- src/library/scala/Function18.scala | 4 +- src/library/scala/Function19.scala | 4 +- src/library/scala/Function2.scala | 14 +++-- src/library/scala/Function20.scala | 4 +- src/library/scala/Function21.scala | 4 +- src/library/scala/Function22.scala | 4 +- src/library/scala/Function3.scala | 4 +- src/library/scala/Function4.scala | 4 +- src/library/scala/Function5.scala | 4 +- src/library/scala/Function6.scala | 4 +- src/library/scala/Function7.scala | 4 +- src/library/scala/Function8.scala | 4 +- src/library/scala/Function9.scala | 4 +- src/library/scala/Predef.scala | 3 +- src/library/scala/Product1.scala | 4 +- src/library/scala/Product10.scala | 4 +- src/library/scala/Product11.scala | 4 +- src/library/scala/Product12.scala | 4 +- src/library/scala/Product13.scala | 4 +- src/library/scala/Product14.scala | 4 +- src/library/scala/Product15.scala | 4 +- src/library/scala/Product16.scala | 4 +- src/library/scala/Product17.scala | 4 +- src/library/scala/Product18.scala | 4 +- src/library/scala/Product19.scala | 4 +- src/library/scala/Product2.scala | 4 +- src/library/scala/Product20.scala | 4 +- src/library/scala/Product21.scala | 4 +- src/library/scala/Product22.scala | 4 +- src/library/scala/Product3.scala | 4 +- src/library/scala/Product4.scala | 4 +- src/library/scala/Product5.scala | 4 +- src/library/scala/Product6.scala | 4 +- src/library/scala/Product7.scala | 4 +- src/library/scala/Product8.scala | 4 +- src/library/scala/Product9.scala | 4 +- src/library/scala/Specializable.scala | 29 ++++++++++ src/library/scala/SpecializableCompanion.scala | 1 + src/library/scala/Tuple1.scala | 2 +- src/library/scala/Tuple10.scala | 2 +- src/library/scala/Tuple11.scala | 2 +- src/library/scala/Tuple12.scala | 2 +- src/library/scala/Tuple13.scala | 2 +- src/library/scala/Tuple14.scala | 2 +- src/library/scala/Tuple15.scala | 2 +- src/library/scala/Tuple16.scala | 2 +- src/library/scala/Tuple17.scala | 2 +- src/library/scala/Tuple18.scala | 2 +- src/library/scala/Tuple19.scala | 2 +- src/library/scala/Tuple2.scala | 2 +- src/library/scala/Tuple20.scala | 2 +- src/library/scala/Tuple21.scala | 2 +- src/library/scala/Tuple22.scala | 2 +- src/library/scala/Tuple3.scala | 2 +- src/library/scala/Tuple4.scala | 2 
+- src/library/scala/Tuple5.scala | 2 +- src/library/scala/Tuple6.scala | 2 +- src/library/scala/Tuple7.scala | 2 +- src/library/scala/Tuple8.scala | 2 +- src/library/scala/Tuple9.scala | 2 +- src/library/scala/package.scala | 8 +++ src/library/scala/runtime/AbstractFunction1.scala | 2 +- src/library/scala/specialized.scala | 13 +++-- test/files/buildmanager/t2652/t2652.check | 2 +- test/files/pos/spec-Function1.scala | 2 +- test/files/pos/spec-groups.scala | 65 ++++++++++++++++++++++ test/files/pos/specialize10.scala | 7 +++ test/files/run/t3575.check | 4 ++ test/files/run/t3575.scala | 12 ++++ test/files/run/t4770.check | 2 + test/files/run/t4770.scala | 15 +++++ test/files/run/t4794.check | 2 +- test/files/specialized/arrays-traits.check | 8 +-- test/files/specialized/arrays-traits.scala | 17 +----- test/files/specialized/arrays.check | 6 +- test/pending/pos/t4012.scala | 7 +++ test/pending/pos/t4541.scala | 10 ++++ test/pending/pos/t4786.scala | 24 ++++++++ test/pending/pos/t4790.scala | 4 ++ test/pending/run/t4511.scala | 10 ++++ test/pending/run/t4971.scala | 16 ++++++ test/pending/run/t5284.scala | 14 +++++ 101 files changed, 458 insertions(+), 236 deletions(-) create mode 100644 src/library/scala/Specializable.scala create mode 100644 test/files/pos/spec-groups.scala create mode 100644 test/files/pos/specialize10.scala create mode 100644 test/files/run/t3575.check create mode 100644 test/files/run/t3575.scala create mode 100644 test/files/run/t4770.check create mode 100644 test/files/run/t4770.scala create mode 100644 test/pending/pos/t4012.scala create mode 100644 test/pending/pos/t4541.scala create mode 100644 test/pending/pos/t4786.scala create mode 100644 test/pending/pos/t4790.scala create mode 100644 test/pending/run/t4511.scala create mode 100644 test/pending/run/t4971.scala create mode 100644 test/pending/run/t5284.scala diff --git a/lib/scala-compiler.jar.desired.sha1 b/lib/scala-compiler.jar.desired.sha1 index c7f298f70b..2a56ab3880 100644 --- a/lib/scala-compiler.jar.desired.sha1 +++ b/lib/scala-compiler.jar.desired.sha1 @@ -1 +1 @@ -6e00ec5544e9e363edbdd8f46ff1f08441a46b95 ?scala-compiler.jar +797b3233ce29c4c565118742160c6c5c08800b94 ?scala-compiler.jar diff --git a/lib/scala-library-src.jar.desired.sha1 b/lib/scala-library-src.jar.desired.sha1 index 258d23a924..b187227638 100644 --- a/lib/scala-library-src.jar.desired.sha1 +++ b/lib/scala-library-src.jar.desired.sha1 @@ -1 +1 @@ -a2554c00ffd660d5ddb564e6a789f2f53080aceb ?scala-library-src.jar +dab2f9528a6135e2026650a86eea7aea542515f9 ?scala-library-src.jar diff --git a/lib/scala-library.jar.desired.sha1 b/lib/scala-library.jar.desired.sha1 index d0195909ae..a1c2895ff9 100644 --- a/lib/scala-library.jar.desired.sha1 +++ b/lib/scala-library.jar.desired.sha1 @@ -1 +1 @@ -73ad66b65fa3e609a730270769c0885425782ce9 ?scala-library.jar +c294c9d88e1b65320caf21fc96b65b11785cb381 ?scala-library.jar diff --git a/src/build/genprod.scala b/src/build/genprod.scala index 9e5b6810c1..a43b5e02c7 100644 --- a/src/build/genprod.scala +++ b/src/build/genprod.scala @@ -97,7 +97,7 @@ zzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzzz */ object FunctionZero extends Function(0) { override def genprodString = "\n// genprod generated these sources at: " + new java.util.Date() override def covariantSpecs = "@specialized " - override def descriptiveComment = functionNTemplate.format("javaVersion", "anonfun0", + override def descriptiveComment = " " + functionNTemplate.format("javaVersion", "anonfun0", """ * val javaVersion 
= () => sys.props("java.version") * @@ -111,10 +111,10 @@ object FunctionZero extends Function(0) { object FunctionOne extends Function(1) { override def classAnnotation = "@annotation.implicitNotFound(msg = \"No implicit view available from ${T1} => ${R}.\")\n" - override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double) " - override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) " + override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) " + override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) " - override def descriptiveComment = functionNTemplate.format("succ", "anonfun1", + override def descriptiveComment = " " + functionNTemplate.format("succ", "anonfun1", """ * val succ = (x: Int) => x + 1 * val anonfun1 = new Function1[Int, Int] { @@ -146,7 +146,7 @@ object FunctionTwo extends Function(2) { override def contravariantSpecs = "@specialized(scala.Int, scala.Long, scala.Double) " override def covariantSpecs = "@specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) " - override def descriptiveComment = functionNTemplate.format("max", "anonfun2", + override def descriptiveComment = " " + functionNTemplate.format("max", "anonfun2", """ * val max = (x: Int, y: Int) => if (x < y) y else x * @@ -175,14 +175,20 @@ class Function(val i: Int) extends Group("Function") with Arity { * * {{{ * object Main extends App { %s } - * }}}""" + * }}} + * + * Note that `Function1` does not define a total function, as might + * be suggested by the existence of [[scala.PartialFunction]]. The only + * distinction between `Function1` and `PartialFunction` is that the + * latter can specify inputs which it will not handle. + """ def toStr() = "\"" + ("" format i) + "\"" def apply() = { {header} /** A function of {i} parameter{s}. - * {descriptiveComment} + *{descriptiveComment} */ {classAnnotation}trait {className}{contraCoArgs} extends AnyRef {{ self => /** Apply the body of this function to the argument{s}. @@ -211,12 +217,11 @@ class Function(val i: Int) extends Group("Function") with Arity { ) // f(x1,x2,x3,x4,x5,x6) == (f.curried)(x1)(x2)(x3)(x4)(x5)(x6) - def curryComment = { """ - /** Creates a curried version of this function. + def curryComment = { +"""/** Creates a curried version of this function. 
* * @return a function `f` such that `f%s == apply%s` - */ -""".format(xdefs map ("(" + _ + ")") mkString, commaXs) + */""".format(xdefs map ("(" + _ + ")") mkString, commaXs) } def tupleMethod = { diff --git a/src/compiler/scala/reflect/internal/ClassfileConstants.scala b/src/compiler/scala/reflect/internal/ClassfileConstants.scala index f1bf41ede9..1c4c007de0 100644 --- a/src/compiler/scala/reflect/internal/ClassfileConstants.scala +++ b/src/compiler/scala/reflect/internal/ClassfileConstants.scala @@ -88,6 +88,7 @@ object ClassfileConstants { final val ARRAY_TAG = '[' final val VOID_TAG = 'V' final val TVAR_TAG = 'T' + final val OBJECT_TAG = 'L' final val ANNOTATION_TAG = '@' final val SCALA_NOTHING = "scala.runtime.Nothing$" final val SCALA_NULL = "scala.runtime.Null$" diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 5b2c61701d..b4b0a7335d 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -71,7 +71,7 @@ trait Definitions extends reflect.api.StandardDefinitions { tpnme.Double -> DOUBLE_TAG, tpnme.Boolean -> BOOL_TAG, tpnme.Unit -> VOID_TAG, - tpnme.Object -> TVAR_TAG + tpnme.Object -> OBJECT_TAG ) private def classesMap[T](f: Name => T) = symbolsMap(ScalaValueClassesNoUnit, f) @@ -128,6 +128,7 @@ trait Definitions extends reflect.api.StandardDefinitions { FloatClass, DoubleClass ) + def ScalaValueClassCompanions: List[Symbol] = ScalaValueClasses map (_.companionSymbol) } object definitions extends AbsDefinitions with ValueClassDefinitions { @@ -209,8 +210,12 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val AnyClass = newClass(ScalaPackageClass, tpnme.Any, Nil, ABSTRACT) lazy val AnyRefClass = newAlias(ScalaPackageClass, tpnme.AnyRef, ObjectClass.typeConstructor) lazy val ObjectClass = getClass(sn.Object) - lazy val AnyCompanionClass = getRequiredClass("scala.AnyCompanion") initFlags (SEALED | ABSTRACT | TRAIT) - lazy val AnyValCompanionClass = getRequiredClass("scala.AnyValCompanion") initFlags (SEALED | ABSTRACT | TRAIT) + + // Note: this is not the type alias AnyRef, it's a companion-like + // object used by the @specialize annotation. + def AnyRefModule = getMember(ScalaPackageClass, nme.AnyRef) + @deprecated("Use AnyRefModule", "2.10.0") + def Predef_AnyRef = AnyRefModule // bottom types lazy val RuntimeNothingClass = getClass(fulltpnme.RuntimeNothing) @@ -265,9 +270,7 @@ trait Definitions extends reflect.api.StandardDefinitions { lazy val PredefModule: Symbol = getRequiredModule("scala.Predef") lazy val PredefModuleClass = PredefModule.moduleClass - // Note: this is not the type alias AnyRef, it's a val defined in Predef - // used by the @specialize annotation. - def Predef_AnyRef = getMember(PredefModule, nme.AnyRef) + def Predef_classOf = getMember(PredefModule, nme.classOf) def Predef_identity = getMember(PredefModule, nme.identity) def Predef_conforms = getMember(PredefModule, nme.conforms) @@ -281,6 +284,11 @@ trait Definitions extends reflect.api.StandardDefinitions { def isPredefMemberNamed(sym: Symbol, name: Name) = ( (sym.name == name) && (sym.owner == PredefModule.moduleClass) ) + + /** Specialization. 
+ */ + lazy val SpecializableModule = getRequiredModule("scala.Specializable") + lazy val GroupOfSpecializable = SpecializableModule.info.member(newTypeName("Group")) lazy val ConsoleModule: Symbol = getRequiredModule("scala.Console") lazy val ScalaRunTimeModule: Symbol = getRequiredModule("scala.runtime.ScalaRunTime") @@ -883,8 +891,9 @@ trait Definitions extends reflect.api.StandardDefinitions { private lazy val boxedValueClassesSet = boxedClass.values.toSet + BoxedUnitClass /** Is symbol a value class? */ - def isValueClass(sym: Symbol) = scalaValueClassesSet(sym) - def isNonUnitValueClass(sym: Symbol) = (sym != UnitClass) && isValueClass(sym) + def isValueClass(sym: Symbol) = scalaValueClassesSet(sym) + def isNonUnitValueClass(sym: Symbol) = isValueClass(sym) && (sym != UnitClass) + def isSpecializableClass(sym: Symbol) = isValueClass(sym) || (sym == AnyRefClass) def isScalaValueType(tp: Type) = scalaValueClassesSet(tp.typeSymbol) /** Is symbol a boxed value class, e.g. java.lang.Integer? */ diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 4012d08e42..05f5dbc379 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -6,12 +6,9 @@ package scala.tools.nsc package transform - import scala.tools.nsc.symtab.Flags import scala.collection.{ mutable, immutable } - - /** Specialize code on types. * * Make sure you've read the thesis: @@ -71,10 +68,10 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { RootClass, BooleanClass, UnitClass, ArrayClass, ScalaValueClasses, isValueClass, isScalaValueType, SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass, - AnyRefClass, ObjectClass, Predef_AnyRef, - uncheckedVarianceClass + AnyRefClass, ObjectClass, AnyRefModule, + GroupOfSpecializable, uncheckedVarianceClass } - + /** TODO - this is a lot of maps. */ @@ -105,16 +102,26 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { private def isSpecialized(sym: Symbol) = sym hasAnnotation SpecializedClass private def hasSpecializedFlag(sym: Symbol) = sym hasFlag SPECIALIZED private def specializedTypes(tps: List[Symbol]) = tps filter isSpecialized - private def specializedOn(sym: Symbol) = sym getAnnotation SpecializedClass match { - case Some(AnnotationInfo(_, args, _)) => args - case _ => Nil + private def specializedOn(sym: Symbol): List[Symbol] = { + sym getAnnotation SpecializedClass match { + case Some(ann @ AnnotationInfo(_, args, _)) => + args map (_.tpe) flatMap { tp => + tp baseType GroupOfSpecializable match { + case TypeRef(_, GroupOfSpecializable, arg :: Nil) => + arg.typeArgs map (_.typeSymbol) + case _ => + List(tp.typeSymbol) + } + } + case _ => Nil + } } // If we replace `isBoundedGeneric` with (tp <:< AnyRefClass.tpe), // then pos/spec-List.scala fails - why? Does this kind of check fail // for similar reasons? Does `sym.isAbstractType` make a difference? 
private def isSpecializedAnyRefSubtype(tp: Type, sym: Symbol) = ( - specializedOn(sym).exists(_.symbol == Predef_AnyRef) // specialized on AnyRef + (specializedOn(sym) contains AnyRefModule) && !isValueClass(tp.typeSymbol) && isBoundedGeneric(tp) ) @@ -322,28 +329,34 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { } } - lazy val primitiveTypes = ScalaValueClasses map (_.tpe) + lazy val specializableTypes = (ScalaValueClasses :+ AnyRefClass) map (_.tpe) sorted + + /** If the symbol is the companion of a value class, the value class. + * Otherwise, AnyRef. + */ + def specializesClass(sym: Symbol): Symbol = { + val c = sym.companionClass + if (isValueClass(c)) c else AnyRefClass + } /** Return the types `sym` should be specialized at. This may be some of the primitive types * or AnyRef. AnyRef means that a new type parameter T will be generated later, known to be a * subtype of AnyRef (T <: AnyRef). * These are in a meaningful order for stability purposes. */ - def concreteTypes(sym: Symbol): List[Type] = ( - if (!isSpecialized(sym)) Nil // no @specialized Annotation - else specializedOn(sym) match { - case Nil => primitiveTypes // specialized on everything - case args => // specialized on args - (args map { tp => - if (tp.symbol == Predef_AnyRef) { - if (isBoundedGeneric(sym.tpe)) - reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".") - AnyRefClass.tpe - } - else tp.symbol.companionClass.tpe - }).sorted - } - ) + def concreteTypes(sym: Symbol): List[Type] = { + val types = ( + if (!isSpecialized(sym)) Nil // no @specialized Annotation + else specializedOn(sym) match { + case Nil => specializableTypes // specialized on everything + case args => args map (s => specializesClass(s).tpe) sorted // specialized on args + } + ) + if (isBoundedGeneric(sym.tpe) && (types contains AnyRefClass)) + reporter.warning(sym.pos, sym + " is always a subtype of " + AnyRefClass.tpe + ".") + + types + } /** Return a list of all type environments for all specializations * of @specialized types in `tps`. 
diff --git a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala index 3536608efd..179bea0035 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Duplicators.scala @@ -7,7 +7,6 @@ package scala.tools.nsc package typechecker import scala.tools.nsc.symtab.Flags - import scala.collection.{ mutable, immutable } /** Duplicate trees and re-type check them, taking care to replace @@ -18,6 +17,7 @@ import scala.collection.{ mutable, immutable } */ abstract class Duplicators extends Analyzer { import global._ + import definitions.{ AnyRefClass, AnyValClass } def retyped(context: Context, tree: Tree): Tree = { resetClassOwners @@ -308,17 +308,26 @@ abstract class Duplicators extends Analyzer { super.typed(atPos(tree.pos)(tree1)) */ case Match(scrut, cases) => - val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType) + val scrut1 = typed(scrut, EXPRmode | BYVALmode, WildcardType) val scrutTpe = scrut1.tpe.widen - val cases1 = if (scrutTpe.isFinalType) cases filter { - case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) => - // the typed pattern is not incompatible with the scrutinee type - scrutTpe.matchesPattern(fixType(tpt.tpe)) - case CaseDef(Typed(_, tpt), EmptyTree, body) => - // the typed pattern is not incompatible with the scrutinee type - scrutTpe.matchesPattern(fixType(tpt.tpe)) - case _ => true - } else cases + val cases1 = { + if (scrutTpe.isFinalType) cases filter { + case CaseDef(Bind(_, pat @ Typed(_, tpt)), EmptyTree, body) => + // the typed pattern is not incompatible with the scrutinee type + scrutTpe matchesPattern fixType(tpt.tpe) + case CaseDef(Typed(_, tpt), EmptyTree, body) => + // the typed pattern is not incompatible with the scrutinee type + scrutTpe matchesPattern fixType(tpt.tpe) + case _ => true + } + // Without this, AnyRef specializations crash on patterns like + // case _: Boolean => ... + // Not at all sure this is safe. + else if (scrutTpe <:< AnyRefClass.tpe) + cases filterNot (_.pat.tpe <:< AnyValClass.tpe) + else + cases + } super.typed(atPos(tree.pos)(Match(scrut, cases1)), mode, pt) diff --git a/src/library/scala/AnyValCompanion.scala b/src/library/scala/AnyValCompanion.scala index d6cb498185..47555938a0 100644 --- a/src/library/scala/AnyValCompanion.scala +++ b/src/library/scala/AnyValCompanion.scala @@ -18,4 +18,4 @@ package scala * }}} * */ -private[scala] trait AnyValCompanion extends SpecializableCompanion { } +private[scala] trait AnyValCompanion extends Specializable { } diff --git a/src/library/scala/Function0.scala b/src/library/scala/Function0.scala index f68bbcc454..508ef25e81 100644 --- a/src/library/scala/Function0.scala +++ b/src/library/scala/Function0.scala @@ -6,18 +6,18 @@ ** |/ ** \* */ // GENERATED CODE: DO NOT EDIT. -// genprod generated these sources at: Sun Jul 31 00:37:30 CEST 2011 +// genprod generated these sources at: Tue Feb 14 16:49:03 PST 2012 package scala /** A function of 0 parameters. 
- * + * * In the following example, the definition of javaVersion is a * shorthand for the anonymous class definition anonfun0: * * {{{ - * object Main extends Application { + * object Main extends App { * val javaVersion = () => sys.props("java.version") * * val anonfun0 = new Function0[String] { @@ -26,12 +26,18 @@ package scala * assert(javaVersion() == anonfun0()) * } * }}} + * + * Note that `Function1` does not define a total function, as might + * be suggested by the existence of [[scala.PartialFunction]]. The only + * distinction between `Function1` and `PartialFunction` is that the + * latter can specify inputs which it will not handle. + */ trait Function0[@specialized +R] extends AnyRef { self => /** Apply the body of this function to the arguments. * @return the result of function application. */ def apply(): R - + override def toString() = "" } diff --git a/src/library/scala/Function1.scala b/src/library/scala/Function1.scala index 7517e6604b..06936e54cb 100644 --- a/src/library/scala/Function1.scala +++ b/src/library/scala/Function1.scala @@ -11,12 +11,12 @@ package scala /** A function of 1 parameter. - * + * * In the following example, the definition of succ is a * shorthand for the anonymous class definition anonfun1: * * {{{ - * object Main extends Application { + * object Main extends App { * val succ = (x: Int) => x + 1 * val anonfun1 = new Function1[Int, Int] { * def apply(x: Int): Int = x + 1 @@ -29,17 +29,15 @@ package scala * be suggested by the existence of [[scala.PartialFunction]]. The only * distinction between `Function1` and `PartialFunction` is that the * latter can specify inputs which it will not handle. - * + */ @annotation.implicitNotFound(msg = "No implicit view available from ${T1} => ${R}.") -trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => - /** Apply the body of this function to the argument. It may throw an - * exception. - * +trait Function1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends AnyRef { self => + /** Apply the body of this function to the argument. * @return the result of function application. */ def apply(v1: T1): R - + /** Composes two instances of Function1 in a new Function1, with this function applied last. * * @tparam A the type to which function `g` can be applied diff --git a/src/library/scala/Function10.scala b/src/library/scala/Function10.scala index 6f17606afd..9e107fc53d 100644 --- a/src/library/scala/Function10.scala +++ b/src/library/scala/Function10.scala @@ -18,12 +18,10 @@ trait Function10[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, +R] extends * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10)).curried } diff --git a/src/library/scala/Function11.scala b/src/library/scala/Function11.scala index 7a73bd35bf..783a86ab5d 100644 --- a/src/library/scala/Function11.scala +++ b/src/library/scala/Function11.scala @@ -18,12 +18,10 @@ trait Function11[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, +R] ex * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11)).curried } diff --git a/src/library/scala/Function12.scala b/src/library/scala/Function12.scala index c099c0436a..7f4dee6216 100644 --- a/src/library/scala/Function12.scala +++ b/src/library/scala/Function12.scala @@ -18,12 +18,10 @@ trait Function12[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12)).curried } diff --git a/src/library/scala/Function13.scala b/src/library/scala/Function13.scala index f13db28f30..23853dde69 100644 --- a/src/library/scala/Function13.scala +++ b/src/library/scala/Function13.scala @@ -18,12 +18,10 @@ trait Function13[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13)).curried } diff --git a/src/library/scala/Function14.scala b/src/library/scala/Function14.scala index d0345cc552..372f1cfafb 100644 --- a/src/library/scala/Function14.scala +++ b/src/library/scala/Function14.scala @@ -18,12 +18,10 @@ trait Function14[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14)).curried } diff --git a/src/library/scala/Function15.scala b/src/library/scala/Function15.scala index 69ff039f5b..47c7309695 100644 --- a/src/library/scala/Function15.scala +++ b/src/library/scala/Function15.scala @@ -18,12 +18,10 @@ trait Function15[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15)).curried } diff --git a/src/library/scala/Function16.scala b/src/library/scala/Function16.scala index d544d89303..8eea42de5b 100644 --- a/src/library/scala/Function16.scala +++ b/src/library/scala/Function16.scala @@ -18,12 +18,10 @@ trait Function16[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16)).curried } diff --git a/src/library/scala/Function17.scala b/src/library/scala/Function17.scala index 16c71e7ada..2d93af34f2 100644 --- a/src/library/scala/Function17.scala +++ b/src/library/scala/Function17.scala @@ -18,12 +18,10 @@ trait Function17[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17)).curried } diff --git a/src/library/scala/Function18.scala b/src/library/scala/Function18.scala index dfd70c2353..ffca98c443 100644 --- a/src/library/scala/Function18.scala +++ b/src/library/scala/Function18.scala @@ -18,12 +18,10 @@ trait Function18[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18)).curried } diff --git a/src/library/scala/Function19.scala b/src/library/scala/Function19.scala index 63decd03ad..f661ea7707 100644 --- a/src/library/scala/Function19.scala +++ b/src/library/scala/Function19.scala @@ -18,12 +18,10 @@ trait Function19[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19)).curried } diff --git a/src/library/scala/Function2.scala b/src/library/scala/Function2.scala index a4ad87fa97..1812f042e0 100644 --- a/src/library/scala/Function2.scala +++ b/src/library/scala/Function2.scala @@ -11,12 +11,12 @@ package scala /** A function of 2 parameters. - * + * * In the following example, the definition of max is a * shorthand for the anonymous class definition anonfun2: * * {{{ - * object Main extends Application { + * object Main extends App { * val max = (x: Int, y: Int) => if (x < y) y else x * * val anonfun2 = new Function2[Int, Int, Int] { @@ -25,18 +25,22 @@ package scala * assert(max(0, 1) == anonfun2(0, 1)) * } * }}} + * + * Note that `Function1` does not define a total function, as might + * be suggested by the existence of [[scala.PartialFunction]]. The only + * distinction between `Function1` and `PartialFunction` is that the + * latter can specify inputs which it will not handle. + */ trait Function2[@specialized(scala.Int, scala.Long, scala.Double) -T1, @specialized(scala.Int, scala.Long, scala.Double) -T2, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends AnyRef { self => /** Apply the body of this function to the arguments. * @return the result of function application. */ def apply(v1: T1, v2: T2): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2) == apply(x1, x2)` - */ - def curried: T1 => T2 => R = { + */ def curried: T1 => T2 => R = { (x1: T1) => (x2: T2) => apply(x1, x2) } diff --git a/src/library/scala/Function20.scala b/src/library/scala/Function20.scala index 7219c9be81..e4fb9f280c 100644 --- a/src/library/scala/Function20.scala +++ b/src/library/scala/Function20.scala @@ -18,12 +18,10 @@ trait Function20[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20)).curried } diff --git a/src/library/scala/Function21.scala b/src/library/scala/Function21.scala index c7d55960db..9823386856 100644 --- a/src/library/scala/Function21.scala +++ b/src/library/scala/Function21.scala @@ -18,12 +18,10 @@ trait Function21[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21)).curried } diff --git a/src/library/scala/Function22.scala b/src/library/scala/Function22.scala index 196421c830..e708f7f49a 100644 --- a/src/library/scala/Function22.scala +++ b/src/library/scala/Function22.scala @@ -18,12 +18,10 @@ trait Function22[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, -T10, -T11, -T12, * @return the result of function application. 
*/ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9, v10: T10, v11: T11, v12: T12, v13: T13, v14: T14, v15: T15, v16: T16, v17: T17, v18: T18, v19: T19, v20: T20, v21: T21, v22: T22): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9)(x10)(x11)(x12)(x13)(x14)(x15)(x16)(x17)(x18)(x19)(x20)(x21)(x22) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => T10 => T11 => T12 => T13 => T14 => T15 => T16 => T17 => T18 => T19 => T20 => T21 => T22 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9, x10: T10, x11: T11, x12: T12, x13: T13, x14: T14, x15: T15, x16: T16, x17: T17, x18: T18, x19: T19, x20: T20, x21: T21, x22: T22) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9, x10, x11, x12, x13, x14, x15, x16, x17, x18, x19, x20, x21, x22)).curried } diff --git a/src/library/scala/Function3.scala b/src/library/scala/Function3.scala index 09a5aa5828..62a997c1b5 100644 --- a/src/library/scala/Function3.scala +++ b/src/library/scala/Function3.scala @@ -18,12 +18,10 @@ trait Function3[-T1, -T2, -T3, +R] extends AnyRef { self => * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3) == apply(x1, x2, x3)` - */ - def curried: T1 => T2 => T3 => R = { + */ def curried: T1 => T2 => T3 => R = { (x1: T1) => (x2: T2) => (x3: T3) => apply(x1, x2, x3) } diff --git a/src/library/scala/Function4.scala b/src/library/scala/Function4.scala index 00da84636a..86d2faeac8 100644 --- a/src/library/scala/Function4.scala +++ b/src/library/scala/Function4.scala @@ -18,12 +18,10 @@ trait Function4[-T1, -T2, -T3, -T4, +R] extends AnyRef { self => * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4) == apply(x1, x2, x3, x4)` - */ - def curried: T1 => T2 => T3 => T4 => R = { + */ def curried: T1 => T2 => T3 => T4 => R = { (x1: T1) => (x2: T2) => (x3: T3) => (x4: T4) => apply(x1, x2, x3, x4) } diff --git a/src/library/scala/Function5.scala b/src/library/scala/Function5.scala index 3915048906..bd9af77f12 100644 --- a/src/library/scala/Function5.scala +++ b/src/library/scala/Function5.scala @@ -18,12 +18,10 @@ trait Function5[-T1, -T2, -T3, -T4, -T5, +R] extends AnyRef { self => * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5) == apply(x1, x2, x3, x4, x5)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5) => self.apply(x1, x2, x3, x4, x5)).curried } diff --git a/src/library/scala/Function6.scala b/src/library/scala/Function6.scala index 183a7332e1..4f601a468c 100644 --- a/src/library/scala/Function6.scala +++ b/src/library/scala/Function6.scala @@ -18,12 +18,10 @@ trait Function6[-T1, -T2, -T3, -T4, -T5, -T6, +R] extends AnyRef { self => * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6) == apply(x1, x2, x3, x4, x5, x6)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6) => self.apply(x1, x2, x3, x4, x5, x6)).curried } diff --git a/src/library/scala/Function7.scala b/src/library/scala/Function7.scala index 10f8e9b599..6978b6545d 100644 --- a/src/library/scala/Function7.scala +++ b/src/library/scala/Function7.scala @@ -18,12 +18,10 @@ trait Function7[-T1, -T2, -T3, -T4, -T5, -T6, -T7, +R] extends AnyRef { self => * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7) == apply(x1, x2, x3, x4, x5, x6, x7)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7) => self.apply(x1, x2, x3, x4, x5, x6, x7)).curried } diff --git a/src/library/scala/Function8.scala b/src/library/scala/Function8.scala index 8144b36101..903551d939 100644 --- a/src/library/scala/Function8.scala +++ b/src/library/scala/Function8.scala @@ -18,12 +18,10 @@ trait Function8[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, +R] extends AnyRef { sel * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8): R - /** Creates a curried version of this function. * * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8) == apply(x1, x2, x3, x4, x5, x6, x7, x8)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8)).curried } diff --git a/src/library/scala/Function9.scala b/src/library/scala/Function9.scala index ee04ed0915..0c273ba929 100644 --- a/src/library/scala/Function9.scala +++ b/src/library/scala/Function9.scala @@ -18,12 +18,10 @@ trait Function9[-T1, -T2, -T3, -T4, -T5, -T6, -T7, -T8, -T9, +R] extends AnyRef * @return the result of function application. */ def apply(v1: T1, v2: T2, v3: T3, v4: T4, v5: T5, v6: T6, v7: T7, v8: T8, v9: T9): R - /** Creates a curried version of this function. 
* * @return a function `f` such that `f(x1)(x2)(x3)(x4)(x5)(x6)(x7)(x8)(x9) == apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)` - */ - def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = { + */ def curried: T1 => T2 => T3 => T4 => T5 => T6 => T7 => T8 => T9 => R = { (x1: T1) => ((x2: T2, x3: T3, x4: T4, x5: T5, x6: T6, x7: T7, x8: T8, x9: T9) => self.apply(x1, x2, x3, x4, x5, x6, x7, x8, x9)).curried } diff --git a/src/library/scala/Predef.scala b/src/library/scala/Predef.scala index 824e048e73..a2ee76500c 100644 --- a/src/library/scala/Predef.scala +++ b/src/library/scala/Predef.scala @@ -95,7 +95,8 @@ object Predef extends LowPriorityImplicits { type Set[A] = immutable.Set[A] val Map = immutable.Map val Set = immutable.Set - val AnyRef = new SpecializableCompanion {} // a dummy used by the specialization annotation + // @deprecated("Use scala.AnyRef instead", "2.10.0") + // def AnyRef = scala.AnyRef // Manifest types, companions, and incantations for summoning type ClassManifest[T] = scala.reflect.ClassManifest[T] diff --git a/src/library/scala/Product1.scala b/src/library/scala/Product1.scala index ab8b0a4505..0106ad34ee 100644 --- a/src/library/scala/Product1.scala +++ b/src/library/scala/Product1.scala @@ -23,7 +23,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product { */ override def productArity = 1 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product1[@specialized(Int, Long, Double) +T1] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case _ => throw new IndexOutOfBoundsException(n.toString()) } diff --git a/src/library/scala/Product10.scala b/src/library/scala/Product10.scala index 536fb2fed9..ca53b580c0 100644 --- a/src/library/scala/Product10.scala +++ b/src/library/scala/Product10.scala @@ -23,7 +23,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ */ override def productArity = 10 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10] extends Produ */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product11.scala b/src/library/scala/Product11.scala index 7d49eccc5e..3d5942f3fa 100644 --- a/src/library/scala/Product11.scala +++ b/src/library/scala/Product11.scala @@ -23,7 +23,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends */ override def productArity = 11 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. 
* @@ -33,7 +33,7 @@ trait Product11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11] extends */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product12.scala b/src/library/scala/Product12.scala index 0e9c4a01a2..803193793c 100644 --- a/src/library/scala/Product12.scala +++ b/src/library/scala/Product12.scala @@ -23,7 +23,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e */ override def productArity = 12 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12] e */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product13.scala b/src/library/scala/Product13.scala index a0629201d0..0c1d889624 100644 --- a/src/library/scala/Product13.scala +++ b/src/library/scala/Product13.scala @@ -23,7 +23,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 13 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product14.scala b/src/library/scala/Product14.scala index 32dda81c3e..0222309a0a 100644 --- a/src/library/scala/Product14.scala +++ b/src/library/scala/Product14.scala @@ -23,7 +23,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 14 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product15.scala b/src/library/scala/Product15.scala index 57851f9870..41be7ec504 100644 --- a/src/library/scala/Product15.scala +++ b/src/library/scala/Product15.scala @@ -23,7 +23,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 15 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. 
* @@ -33,7 +33,7 @@ trait Product15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product16.scala b/src/library/scala/Product16.scala index 75076f3b3c..accee3f965 100644 --- a/src/library/scala/Product16.scala +++ b/src/library/scala/Product16.scala @@ -23,7 +23,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 16 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product17.scala b/src/library/scala/Product17.scala index 9ee6072ffe..da80ae9a6b 100644 --- a/src/library/scala/Product17.scala +++ b/src/library/scala/Product17.scala @@ -23,7 +23,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 17 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product18.scala b/src/library/scala/Product18.scala index 25d0839af1..ea25647762 100644 --- a/src/library/scala/Product18.scala +++ b/src/library/scala/Product18.scala @@ -23,7 +23,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 18 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product19.scala b/src/library/scala/Product19.scala index 5464de7264..5d4347c1a8 100644 --- a/src/library/scala/Product19.scala +++ b/src/library/scala/Product19.scala @@ -23,7 +23,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 19 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. 
* @@ -33,7 +33,7 @@ trait Product19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product2.scala b/src/library/scala/Product2.scala index 8097245926..4e6c70f463 100644 --- a/src/library/scala/Product2.scala +++ b/src/library/scala/Product2.scala @@ -23,7 +23,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub */ override def productArity = 2 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, Doub */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case _ => throw new IndexOutOfBoundsException(n.toString()) diff --git a/src/library/scala/Product20.scala b/src/library/scala/Product20.scala index b094e09aca..f23a0dee3a 100644 --- a/src/library/scala/Product20.scala +++ b/src/library/scala/Product20.scala @@ -23,7 +23,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 20 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product21.scala b/src/library/scala/Product21.scala index fa06cfb438..4a4fe0697f 100644 --- a/src/library/scala/Product21.scala +++ b/src/library/scala/Product21.scala @@ -23,7 +23,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 21 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product22.scala b/src/library/scala/Product22.scala index 46038bf1a2..7ee01b85ae 100644 --- a/src/library/scala/Product22.scala +++ b/src/library/scala/Product22.scala @@ -23,7 +23,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ override def productArity = 22 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. 
* @@ -33,7 +33,7 @@ trait Product22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12, + */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product3.scala b/src/library/scala/Product3.scala index 3a4cd8fc5e..23563c9e23 100644 --- a/src/library/scala/Product3.scala +++ b/src/library/scala/Product3.scala @@ -23,7 +23,7 @@ trait Product3[+T1, +T2, +T3] extends Product { */ override def productArity = 3 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product3[+T1, +T2, +T3] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product4.scala b/src/library/scala/Product4.scala index a4d47457fa..4abaa9051b 100644 --- a/src/library/scala/Product4.scala +++ b/src/library/scala/Product4.scala @@ -23,7 +23,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product { */ override def productArity = 4 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product4[+T1, +T2, +T3, +T4] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product5.scala b/src/library/scala/Product5.scala index 9f25e70af0..9aa4af58b7 100644 --- a/src/library/scala/Product5.scala +++ b/src/library/scala/Product5.scala @@ -23,7 +23,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product { */ override def productArity = 5 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product5[+T1, +T2, +T3, +T4, +T5] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product6.scala b/src/library/scala/Product6.scala index 87fd318c68..2ca1d7c31e 100644 --- a/src/library/scala/Product6.scala +++ b/src/library/scala/Product6.scala @@ -23,7 +23,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product { */ override def productArity = 6 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product6[+T1, +T2, +T3, +T4, +T5, +T6] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product7.scala b/src/library/scala/Product7.scala index d074503315..b7af2d3e32 100644 --- a/src/library/scala/Product7.scala +++ b/src/library/scala/Product7.scala @@ -23,7 +23,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product { */ override def productArity = 7 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. 
* @@ -33,7 +33,7 @@ trait Product7[+T1, +T2, +T3, +T4, +T5, +T6, +T7] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product8.scala b/src/library/scala/Product8.scala index bd6150c235..17b5e48512 100644 --- a/src/library/scala/Product8.scala +++ b/src/library/scala/Product8.scala @@ -23,7 +23,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product { */ override def productArity = 8 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Product9.scala b/src/library/scala/Product9.scala index 1f042944cc..784e9a7029 100644 --- a/src/library/scala/Product9.scala +++ b/src/library/scala/Product9.scala @@ -23,7 +23,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product { */ override def productArity = 9 - + /** Returns the n-th projection of this product if 0 < n <= productArity, * otherwise throws an `IndexOutOfBoundsException`. * @@ -33,7 +33,7 @@ trait Product9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9] extends Product { */ @throws(classOf[IndexOutOfBoundsException]) - override def productElement(n: Int) = n match { + override def productElement(n: Int) = n match { case 0 => _1 case 1 => _2 case 2 => _3 diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala new file mode 100644 index 0000000000..811a735110 --- /dev/null +++ b/src/library/scala/Specializable.scala @@ -0,0 +1,29 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala + +/** A common supertype for companions of specializable types. + * Should not be extended in user code. + */ +trait Specializable extends SpecializableCompanion + +object Specializable { + // No type parameter in @specialized annotation. + trait SpecializedGroup { } + + // Smuggle a list of types by way of a tuple upon which Group is parameterized. + class Group[T >: Null](value: T) extends SpecializedGroup { } + + final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit) + final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef) + final val Bits32AndUp = new Group(Int, Long, Float, Double) + final val Integral = new Group(Byte, Short, Int, Long, Char) + final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double) + final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef) +} diff --git a/src/library/scala/SpecializableCompanion.scala b/src/library/scala/SpecializableCompanion.scala index fbdf42fd0b..ec797c1f15 100644 --- a/src/library/scala/SpecializableCompanion.scala +++ b/src/library/scala/SpecializableCompanion.scala @@ -10,4 +10,5 @@ package scala /** A common supertype for companion classes which specialization takes into account. 
*/ +@deprecated("Use Specializable instead", "2.10.0") private[scala] trait SpecializableCompanion diff --git a/src/library/scala/Tuple1.scala b/src/library/scala/Tuple1.scala index 6d31d35e51..02fdd0cba5 100644 --- a/src/library/scala/Tuple1.scala +++ b/src/library/scala/Tuple1.scala @@ -19,5 +19,5 @@ case class Tuple1[@specialized(Int, Long, Double) +T1](_1: T1) extends Product1[T1] { override def toString() = "(" + _1 + ")" - + } diff --git a/src/library/scala/Tuple10.scala b/src/library/scala/Tuple10.scala index 10d554d467..ba2a02a8b2 100644 --- a/src/library/scala/Tuple10.scala +++ b/src/library/scala/Tuple10.scala @@ -28,5 +28,5 @@ case class Tuple10[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10](_1: T1, _2 extends Product10[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + ")" - + } diff --git a/src/library/scala/Tuple11.scala b/src/library/scala/Tuple11.scala index 2065e4f017..7f51d172d4 100644 --- a/src/library/scala/Tuple11.scala +++ b/src/library/scala/Tuple11.scala @@ -29,5 +29,5 @@ case class Tuple11[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11](_1: extends Product11[T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + ")" - + } diff --git a/src/library/scala/Tuple12.scala b/src/library/scala/Tuple12.scala index a463986752..4bbc6a0eab 100644 --- a/src/library/scala/Tuple12.scala +++ b/src/library/scala/Tuple12.scala @@ -31,5 +31,5 @@ case class Tuple12[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + ")" - + } diff --git a/src/library/scala/Tuple13.scala b/src/library/scala/Tuple13.scala index 2bee0d69ad..77bd59bf2e 100644 --- a/src/library/scala/Tuple13.scala +++ b/src/library/scala/Tuple13.scala @@ -32,5 +32,5 @@ case class Tuple13[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + ")" - + } diff --git a/src/library/scala/Tuple14.scala b/src/library/scala/Tuple14.scala index 60f7c51e64..bf7a4ce016 100644 --- a/src/library/scala/Tuple14.scala +++ b/src/library/scala/Tuple14.scala @@ -33,5 +33,5 @@ case class Tuple14[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + ")" - + } diff --git a/src/library/scala/Tuple15.scala b/src/library/scala/Tuple15.scala index fc8e30580b..582c359bc6 100644 --- a/src/library/scala/Tuple15.scala +++ b/src/library/scala/Tuple15.scala @@ -34,5 +34,5 @@ case class Tuple15[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + ")" - + } diff --git a/src/library/scala/Tuple16.scala b/src/library/scala/Tuple16.scala index 80181f6648..a1e9a790ff 100644 --- a/src/library/scala/Tuple16.scala +++ 
b/src/library/scala/Tuple16.scala @@ -35,5 +35,5 @@ case class Tuple16[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + ")" - + } diff --git a/src/library/scala/Tuple17.scala b/src/library/scala/Tuple17.scala index 6236122be2..f531766c18 100644 --- a/src/library/scala/Tuple17.scala +++ b/src/library/scala/Tuple17.scala @@ -36,5 +36,5 @@ case class Tuple17[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + ")" - + } diff --git a/src/library/scala/Tuple18.scala b/src/library/scala/Tuple18.scala index dd6a819ac5..a96db25e4b 100644 --- a/src/library/scala/Tuple18.scala +++ b/src/library/scala/Tuple18.scala @@ -37,5 +37,5 @@ case class Tuple18[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + ")" - + } diff --git a/src/library/scala/Tuple19.scala b/src/library/scala/Tuple19.scala index 65f0fd22cf..718280d68a 100644 --- a/src/library/scala/Tuple19.scala +++ b/src/library/scala/Tuple19.scala @@ -38,5 +38,5 @@ case class Tuple19[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + ")" - + } diff --git a/src/library/scala/Tuple2.scala b/src/library/scala/Tuple2.scala index dd6ac0cfd2..ad3f7df697 100644 --- a/src/library/scala/Tuple2.scala +++ b/src/library/scala/Tuple2.scala @@ -23,7 +23,7 @@ case class Tuple2[@specialized(Int, Long, Double) +T1, @specialized(Int, Long, D extends Product2[T1, T2] { override def toString() = "(" + _1 + "," + _2 + ")" - + /** Swaps the elements of this `Tuple`. * @return a new Tuple where the first element is the second element of this Tuple and the * second element is the first element of this Tuple. 
diff --git a/src/library/scala/Tuple20.scala b/src/library/scala/Tuple20.scala index cf3626909d..4a44c0bb89 100644 --- a/src/library/scala/Tuple20.scala +++ b/src/library/scala/Tuple20.scala @@ -39,5 +39,5 @@ case class Tuple20[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + ")" - + } diff --git a/src/library/scala/Tuple21.scala b/src/library/scala/Tuple21.scala index 78b9c585c6..580a169e39 100644 --- a/src/library/scala/Tuple21.scala +++ b/src/library/scala/Tuple21.scala @@ -40,5 +40,5 @@ case class Tuple21[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + ")" - + } diff --git a/src/library/scala/Tuple22.scala b/src/library/scala/Tuple22.scala index 0993dfbbc3..fd3392ddea 100644 --- a/src/library/scala/Tuple22.scala +++ b/src/library/scala/Tuple22.scala @@ -41,5 +41,5 @@ case class Tuple22[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9, +T10, +T11, +T12 { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + "," + _10 + "," + _11 + "," + _12 + "," + _13 + "," + _14 + "," + _15 + "," + _16 + "," + _17 + "," + _18 + "," + _19 + "," + _20 + "," + _21 + "," + _22 + ")" - + } diff --git a/src/library/scala/Tuple3.scala b/src/library/scala/Tuple3.scala index dfa0c962a2..0d5399308b 100644 --- a/src/library/scala/Tuple3.scala +++ b/src/library/scala/Tuple3.scala @@ -24,7 +24,7 @@ case class Tuple3[+T1, +T2, +T3](_1: T1, _2: T2, _3: T3) extends Product3[T1, T2, T3] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + ")" - + @deprecated("Use `zipped` instead.", "2.9.0") def zip[Repr1, El1, El2, El3, To](implicit w1: T1 => TLike[El1, Repr1], diff --git a/src/library/scala/Tuple4.scala b/src/library/scala/Tuple4.scala index a919072c88..a859078bcf 100644 --- a/src/library/scala/Tuple4.scala +++ b/src/library/scala/Tuple4.scala @@ -22,5 +22,5 @@ case class Tuple4[+T1, +T2, +T3, +T4](_1: T1, _2: T2, _3: T3, _4: T4) extends Product4[T1, T2, T3, T4] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + ")" - + } diff --git a/src/library/scala/Tuple5.scala b/src/library/scala/Tuple5.scala index 6a94f48ab4..1edfb673ee 100644 --- a/src/library/scala/Tuple5.scala +++ b/src/library/scala/Tuple5.scala @@ -23,5 +23,5 @@ case class Tuple5[+T1, +T2, +T3, +T4, +T5](_1: T1, _2: T2, _3: T3, _4: T4, _5: T extends Product5[T1, T2, T3, T4, T5] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + ")" - + } diff --git a/src/library/scala/Tuple6.scala b/src/library/scala/Tuple6.scala index 34f8224627..5b74937e58 100644 --- a/src/library/scala/Tuple6.scala +++ b/src/library/scala/Tuple6.scala @@ -24,5 +24,5 @@ case class Tuple6[+T1, +T2, +T3, +T4, +T5, +T6](_1: T1, _2: T2, _3: T3, _4: T4, extends Product6[T1, T2, T3, T4, T5, T6] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + ")" - + } diff --git a/src/library/scala/Tuple7.scala b/src/library/scala/Tuple7.scala index 6fc3477ba2..a7f572e9f0 100644 --- 
a/src/library/scala/Tuple7.scala +++ b/src/library/scala/Tuple7.scala @@ -25,5 +25,5 @@ case class Tuple7[+T1, +T2, +T3, +T4, +T5, +T6, +T7](_1: T1, _2: T2, _3: T3, _4: extends Product7[T1, T2, T3, T4, T5, T6, T7] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + ")" - + } diff --git a/src/library/scala/Tuple8.scala b/src/library/scala/Tuple8.scala index 1e21b684fc..9bb427d689 100644 --- a/src/library/scala/Tuple8.scala +++ b/src/library/scala/Tuple8.scala @@ -26,5 +26,5 @@ case class Tuple8[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8](_1: T1, _2: T2, _3: T3 extends Product8[T1, T2, T3, T4, T5, T6, T7, T8] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + ")" - + } diff --git a/src/library/scala/Tuple9.scala b/src/library/scala/Tuple9.scala index 453cea31a1..4d50539e0c 100644 --- a/src/library/scala/Tuple9.scala +++ b/src/library/scala/Tuple9.scala @@ -27,5 +27,5 @@ case class Tuple9[+T1, +T2, +T3, +T4, +T5, +T6, +T7, +T8, +T9](_1: T1, _2: T2, _ extends Product9[T1, T2, T3, T4, T5, T6, T7, T8, T9] { override def toString() = "(" + _1 + "," + _2 + "," + _3 + "," + _4 + "," + _5 + "," + _6 + "," + _7 + "," + _8 + "," + _9 + ")" - + } diff --git a/src/library/scala/package.scala b/src/library/scala/package.scala index 0c5d10b15e..9425eba232 100644 --- a/src/library/scala/package.scala +++ b/src/library/scala/package.scala @@ -28,6 +28,14 @@ package object scala { type NumberFormatException = java.lang.NumberFormatException type AbstractMethodError = java.lang.AbstractMethodError + // A dummy used by the specialization annotation. + // Normally it's bad juju to place objects inside package objects, + // but there's no choice here as we'd have to be AnyRef's companion + // and defined in the same file - except there is no such file. + object AnyRef extends Specializable { + override def toString = "object AnyRef" + } + @deprecated("instead of `@serializable class C`, use `class C extends Serializable`", "2.9.0") type serializable = annotation.serializable diff --git a/src/library/scala/runtime/AbstractFunction1.scala b/src/library/scala/runtime/AbstractFunction1.scala index a9e5e90e20..b2f336fe52 100644 --- a/src/library/scala/runtime/AbstractFunction1.scala +++ b/src/library/scala/runtime/AbstractFunction1.scala @@ -9,6 +9,6 @@ package scala.runtime -abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double) +R] extends Function1[T1, R] { +abstract class AbstractFunction1[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] { } diff --git a/src/library/scala/specialized.scala b/src/library/scala/specialized.scala index 902faa166e..b24474f35d 100644 --- a/src/library/scala/specialized.scala +++ b/src/library/scala/specialized.scala @@ -6,10 +6,10 @@ ** |/ ** \* */ - - package scala +import Specializable._ + /** Annotate type parameters on which code should be automatically * specialized. 
For example: * {{{ @@ -24,8 +24,9 @@ package scala * * @since 2.8 */ -class specialized(types: SpecializableCompanion*) extends annotation.StaticAnnotation { - def this() { - this(Unit, Boolean, Byte, Short, Char, Int, Long, Float, Double) - } +// class tspecialized[T](group: Group[T]) extends annotation.StaticAnnotation { + +class specialized(group: SpecializedGroup) extends annotation.StaticAnnotation { + def this(types: Specializable*) = this(new Group(types.toList)) + def this() = this(Everything) } diff --git a/test/files/buildmanager/t2652/t2652.check b/test/files/buildmanager/t2652/t2652.check index 071281c6ff..b84c80205e 100644 --- a/test/files/buildmanager/t2652/t2652.check +++ b/test/files/buildmanager/t2652/t2652.check @@ -3,7 +3,7 @@ compiling Set(A.scala, B.scala) Changes: Map() builder > A.scala compiling Set(A.scala) -Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ])) +Changes: Map(class A -> List(Added(Definition(A.x$mBc$sp)), Added(Definition(A.x$mCc$sp)), Added(Definition(A.x$mDc$sp)), Added(Definition(A.x$mFc$sp)), Added(Definition(A.x$mIc$sp)), Added(Definition(A.x$mJc$sp)), Added(Definition(A.x$mLc$sp)), Added(Definition(A.x$mSc$sp)), Added(Definition(A.x$mVc$sp)), Added(Definition(A.x$mZc$sp)), Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ])) invalidate B.scala because it references changed definition [Changed(Definition(A.x))[method x changed from [T](t: T)T to [T](t: T)T flags: ]] compiling Set(B.scala) Changes: Map(object B -> List()) diff --git a/test/files/pos/spec-Function1.scala b/test/files/pos/spec-Function1.scala index 7bdcd072b2..5b6af67a74 100644 --- a/test/files/pos/spec-Function1.scala +++ b/test/files/pos/spec-Function1.scala @@ -8,7 +8,7 @@ // generated by genprod on Wed Apr 23 10:06:16 CEST 2008 (with fancy comment) (with extra methods) -package scala +package scalabip /**

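Before the spec-groups.scala test below exercises the new specialization groups exhaustively, here is a smaller, hedged sketch of how the Specializable groups and the reworked @specialized constructors are meant to be used from user code. The class NumBox and its members are invented for illustration and are not part of these patches; Specializable, AllNumeric, Integral, AnyRef and the group/varargs constructors of @specialized are the ones introduced above.

import Specializable._

// Illustration only: NumBox, combine and tag are hypothetical names.
class NumBox[@specialized(AllNumeric) T](val value: T) {
  // A whole group can be named with a single annotation argument.
  def combine[@specialized(Integral) U](u: U): (T, U) = (value, u)
  // An explicit list still works through the Specializable* constructor.
  def tag[@specialized(Int, AnyRef) L](label: L): (L, T) = (label, value)
}

object NumBoxDemo {
  def main(args: Array[String]): Unit = {
    val b = new NumBox(21)       // should pick up the Int specialization of T
    println(b.combine(2L))       // prints (21,2)
    println(b.tag("answer"))     // prints (answer,21)
  }
}
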
diff --git a/test/files/pos/spec-groups.scala b/test/files/pos/spec-groups.scala new file mode 100644 index 0000000000..9b6359a982 --- /dev/null +++ b/test/files/pos/spec-groups.scala @@ -0,0 +1,65 @@ +import Specializable._ + +class A[@specialized(Primitives) T](x: T) { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class B[@specialized(Everything) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class C[@specialized(Bits32AndUp) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class D[@specialized(Integral) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class E[@specialized(AllNumeric) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class F[@specialized(BestOfBreed) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} +class G[@specialized(Byte, Double, AnyRef) T] { + def f1[@specialized(Primitives) U](x: T, y: U) = ((x, y)) + def f2[@specialized(Everything) U](x: T, y: U) = ((x, y)) + def f3[@specialized(Bits32AndUp) U](x: T, y: U) = ((x, y)) + def f4[@specialized(Integral) U](x: T, y: U) = ((x, y)) + def f5[@specialized(AllNumeric) U](x: T, y: U) = ((x, y)) + def f6[@specialized(BestOfBreed) U](x: T, y: U) = ((x, y)) + def f7[@specialized(Byte, Double, AnyRef) U](x: T, y: U) = ((x, y)) +} diff 
--git a/test/files/pos/specialize10.scala b/test/files/pos/specialize10.scala new file mode 100644 index 0000000000..bbe197cda2 --- /dev/null +++ b/test/files/pos/specialize10.scala @@ -0,0 +1,7 @@ +trait Bippy[@specialized( + scala.Char, scala.Boolean, scala.Byte, + scala.Short, scala.Int, scala.Long, + scala.Float, scala.Double, scala.Unit, + scala.AnyRef) T] { } + +trait Bippy2[@specialized(Char, Boolean, Byte, Short, Int, Long, Float, Double, Unit, AnyRef) T] { } diff --git a/test/files/run/t3575.check b/test/files/run/t3575.check new file mode 100644 index 0000000000..c240b3d90c --- /dev/null +++ b/test/files/run/t3575.check @@ -0,0 +1,4 @@ +Two +Two +Two +Two$mcII$sp diff --git a/test/files/run/t3575.scala b/test/files/run/t3575.scala new file mode 100644 index 0000000000..56950e62bb --- /dev/null +++ b/test/files/run/t3575.scala @@ -0,0 +1,12 @@ +case class Two[@specialized A, @specialized B](v: A, w: B); + +// This is here to tell me if the behavior changes, not because +// the output is endorsed. +object Test { + def main(args: Array[String]): Unit = { + println(Two("Hello", 12).getClass().getName()) + println(Two(12, "Hello").getClass().getName()) + println(Two("Hello", "World").getClass().getName()) + println(Two(12, 12).getClass().getName()) + } +} diff --git a/test/files/run/t4770.check b/test/files/run/t4770.check new file mode 100644 index 0000000000..38e5a831fa --- /dev/null +++ b/test/files/run/t4770.check @@ -0,0 +1,2 @@ +(a,2) +(2,a) diff --git a/test/files/run/t4770.scala b/test/files/run/t4770.scala new file mode 100644 index 0000000000..25bf3050c3 --- /dev/null +++ b/test/files/run/t4770.scala @@ -0,0 +1,15 @@ +package crasher { + class Z[@specialized A, @specialized(AnyRef) B](var a: A, var b: B) { + override def toString = "" + ((a, b)) + } + object O { + def apply[@specialized A, @specialized(AnyRef) B](a0: A, b0: B) = new Z(a0, b0) + } +} + +object Test { + def main(args: Array[String]): Unit = { + println(crasher.O("a", 2)) + println(crasher.O(2, "a")) + } +} diff --git a/test/files/run/t4794.check b/test/files/run/t4794.check index f599e28b8a..b4de394767 100644 --- a/test/files/run/t4794.check +++ b/test/files/run/t4794.check @@ -1 +1 @@ -10 +11 diff --git a/test/files/specialized/arrays-traits.check b/test/files/specialized/arrays-traits.check index 92af4f13e1..40687a757e 100644 --- a/test/files/specialized/arrays-traits.check +++ b/test/files/specialized/arrays-traits.check @@ -1,6 +1,6 @@ -0 -0 -0 1 2 -1 \ No newline at end of file +1 +3 +4 +2 diff --git a/test/files/specialized/arrays-traits.scala b/test/files/specialized/arrays-traits.scala index de54d22d18..34a1c37a01 100644 --- a/test/files/specialized/arrays-traits.scala +++ b/test/files/specialized/arrays-traits.scala @@ -1,20 +1,12 @@ - - - import runtime.ScalaRunTime._ - - trait SuperS[@specialized(AnyRef) T] { def arr: Array[T] def foo() = arr(0) def bar(b: Array[T]) = b(0) = arr(0) } - -class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] { -} - +class BaseS[@specialized(AnyRef) T](val arr: Array[T]) extends SuperS[T] { } trait SuperG[T] { def arr: Array[T] @@ -22,13 +14,9 @@ trait SuperG[T] { def bar(b: Array[T]) = b(0) = arr(0) } - -class BaseG[T](val arr: Array[T]) extends SuperG[T] { -} - +class BaseG[T](val arr: Array[T]) extends SuperG[T] { } object Test { - def main(args: Array[String]) { (new BaseS(new Array[String](1)): SuperS[String]).foo println(arrayApplyCount) @@ -42,5 +30,4 @@ object Test { println(arrayApplyCount) println(arrayUpdateCount) } - } diff --git 
a/test/files/specialized/arrays.check b/test/files/specialized/arrays.check index d37dfb720d..8df790f413 100644 --- a/test/files/specialized/arrays.check +++ b/test/files/specialized/arrays.check @@ -1,4 +1,4 @@ -0 -0 50 -51 \ No newline at end of file +51 +101 +102 diff --git a/test/pending/pos/t4012.scala b/test/pending/pos/t4012.scala new file mode 100644 index 0000000000..9b8a1b0dbe --- /dev/null +++ b/test/pending/pos/t4012.scala @@ -0,0 +1,7 @@ +trait C1[+A] { + def head: A = sys.error("") +} +trait C2[@specialized +A] extends C1[A] { + override def head: A = super.head +} +class C3 extends C2[Char] \ No newline at end of file diff --git a/test/pending/pos/t4541.scala b/test/pending/pos/t4541.scala new file mode 100644 index 0000000000..c6d9672cc5 --- /dev/null +++ b/test/pending/pos/t4541.scala @@ -0,0 +1,10 @@ +@SerialVersionUID(1L) +final class SparseArray[@specialized T](private var data : Array[T]) extends Serializable { + def use(inData : Array[T]) = { + data = inData; + } + + def set(that : SparseArray[T]) = { + use(that.data.clone) + } +} \ No newline at end of file diff --git a/test/pending/pos/t4786.scala b/test/pending/pos/t4786.scala new file mode 100644 index 0000000000..f0579142b8 --- /dev/null +++ b/test/pending/pos/t4786.scala @@ -0,0 +1,24 @@ +trait Matrix[@specialized A, Repr[C] <: Matrix[C, Repr]] { // crash goes away if @specialize is removed + def duplicate(mb: MatrixBuilder[A, Repr]): Repr[A] = { + mb.zeros + } +} +trait DenseMatrix[@specialized A] extends Matrix[A, DenseMatrix] +trait DenseMatrixFlt extends DenseMatrix[Float] + +trait MatrixBuilder[@specialized A, Repr[C] <: Matrix[C, Repr]] { + def zeros: Repr[A] +} +object DenseFloatBuilder extends MatrixBuilder[Float, DenseMatrix] { + val zeros = new Object with DenseMatrixFlt + // Note: + // - in 2.9 crash goes away if the explicit type "DenseMatrixFlt" is assigned to "zeros" + // - in 2.9 crash goes away if DenseMatrixFlt is a class instead of a trait: + // val zeros = new DenseMatrixFlt +} + +object Test extends App { + val m1 = DenseFloatBuilder.zeros // in 2.9 crash goes away if explicit type "DenseMatrixFlt" is assigned to m1 + val m2 = m1.duplicate(DenseFloatBuilder) +} + diff --git a/test/pending/pos/t4790.scala b/test/pending/pos/t4790.scala new file mode 100644 index 0000000000..e451fe80ab --- /dev/null +++ b/test/pending/pos/t4790.scala @@ -0,0 +1,4 @@ +package spectest { + class Sp[@specialized A, B](val a: A, val b: B) { } + class Fsp[@specialized A, B](a: A, b: B) extends Sp(a,b) { def ab = (a,b) } +} diff --git a/test/pending/run/t4511.scala b/test/pending/run/t4511.scala new file mode 100644 index 0000000000..58d4e0c7b0 --- /dev/null +++ b/test/pending/run/t4511.scala @@ -0,0 +1,10 @@ +class Interval[@specialized T](val high: T) +class Node[@specialized T](val interval: Interval[T]) { + val x1 = Some(interval.high) +} + +object Test { + def main(args: Array[String]): Unit = { + new Node(new Interval(5)).x1 + } +} \ No newline at end of file diff --git a/test/pending/run/t4971.scala b/test/pending/run/t4971.scala new file mode 100644 index 0000000000..c9b6d6f39f --- /dev/null +++ b/test/pending/run/t4971.scala @@ -0,0 +1,16 @@ +trait A[@specialized(Int) K, @specialized(Double) V] { + def doStuff(k: K, v: V): Unit = sys.error("I am overridden, you cannot call me") +} + +trait B[@specialized(Double) V] extends A[Int, V] { + override def doStuff(k: Int, v: V): Unit = println("Hi - I'm calling doStuff in B") +} + +object Test { + def main(args: Array[String]): Unit = delegate(new B[Double]() {}, 
1, 0.1) + + def delegate[@specialized(Int) K, @specialized(Double) V](a: A[K, V], k: K, v: V) { + a.doStuff(k, v) + } +} + diff --git a/test/pending/run/t5284.scala b/test/pending/run/t5284.scala new file mode 100644 index 0000000000..b43afed5b8 --- /dev/null +++ b/test/pending/run/t5284.scala @@ -0,0 +1,14 @@ +object Test { + def main(args:Array[String]) { + val a = Blarg(Array(1,2,3)) + println(a.m((x:Int) => x+1)) + } +} + +object Blarg { + def apply[T:Manifest](a:Array[T]) = new Blarg(a) +} +class Blarg [@specialized T:Manifest](val a:Array[T]) { + def m[@specialized W>:T,@specialized S](f:W=>S) = f(a(0)) +} + -- cgit v1.2.3 From 83c584026d593b6806e1107d645606b9498c05d6 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 10:42:29 +0100 Subject: Add `dup` method to ParCtrie iterators. --- src/library/scala/collection/mutable/Ctrie.scala | 23 ++++++++++++++++++++-- .../collection/parallel/mutable/ParCtrie.scala | 7 ++++++- test/files/run/ctries/iterator.scala | 10 ++++++++++ 3 files changed, 37 insertions(+), 3 deletions(-) diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index 6ed3a516c4..dbd2129f0c 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -852,7 +852,7 @@ object Ctrie extends MutableMapFactory[Ctrie] { } -private[collection] class CtrieIterator[K, V](var level: Int, ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { +private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ctrie[K, V], mustInit: Boolean = true) extends Iterator[(K, V)] { var stack = new Array[Array[BasicNode]](7) var stackpos = new Array[Int](7) var depth = -1 @@ -920,6 +920,25 @@ private[collection] class CtrieIterator[K, V](var level: Int, ct: Ctrie[K, V], m protected def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new CtrieIterator[K, V](_lev, _ct, _mustInit) + protected def dupTo(it: CtrieIterator[K, V]) = { + it.level = this.level + it.ct = this.ct + it.depth = this.depth + it.current = this.current + + // these need a deep copy + Array.copy(this.stack, 0, it.stack, 0, 7) + Array.copy(this.stackpos, 0, it.stackpos, 0, 7) + + // this one needs to be evaluated + if (this.subiter == null) it.subiter = null + else { + val lst = this.subiter.toList + this.subiter = lst.iterator + it.subiter = lst.iterator + } + } + /** Returns a sequence of iterators over subsets of this iterator. * It's used to ease the implementation of splitters for a parallel version of the Ctrie. 
*/ @@ -955,7 +974,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, ct: Ctrie[K, V], m Seq(this) } - private def print { + def printDebug { println("ctrie iterator") println(stackpos.mkString(",")) println("depth: " + depth) diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala index 86624500fd..37add60df9 100644 --- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -92,7 +92,12 @@ extends CtrieIterator[K, V](lev, ct, mustInit) level < maxsplits } - def dup = null // TODO necessary for views + def dup = { + val it = newIterator(0, ct, false) + dupTo(it) + it.iterated = this.iterated + it + } override def next() = { iterated += 1 diff --git a/test/files/run/ctries/iterator.scala b/test/files/run/ctries/iterator.scala index 4bbf9009f0..85a6ab7623 100644 --- a/test/files/run/ctries/iterator.scala +++ b/test/files/run/ctries/iterator.scala @@ -274,6 +274,16 @@ object IteratorSpec extends Spec { while (it.hasNext) it.next() } + "be duplicated" in { + val sz = 50 + val ct = collection.parallel.mutable.ParCtrie((0 until sz) zip (0 until sz): _*) + val it = ct.splitter + for (_ <- 0 until (sz / 2)) it.next() + val dupit = it.dup + + it.toList shouldEqual dupit.toList + } + } } -- cgit v1.2.3 From ada6771679aa63e8aa57a294dfb268b2a7a32df4 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 13:08:43 +0100 Subject: Add lazy size evaluation to Ctries. Size of the Ctrie is now cached and only recomputed for those parts of the Ctrie that changed since the last snapshot. --- .../scala/collection/mutable/CNodeBase.java | 35 +++++++++++++++++ src/library/scala/collection/mutable/Ctrie.scala | 44 +++++++++++++++++++++- src/library/scala/collection/mutable/MainNode.java | 4 ++ test/benchmarking/ParCtrie-size.scala | 34 +++++++++++++++++ 4 files changed, 116 insertions(+), 1 deletion(-) create mode 100644 src/library/scala/collection/mutable/CNodeBase.java create mode 100644 test/benchmarking/ParCtrie-size.scala diff --git a/src/library/scala/collection/mutable/CNodeBase.java b/src/library/scala/collection/mutable/CNodeBase.java new file mode 100644 index 0000000000..4374943b8d --- /dev/null +++ b/src/library/scala/collection/mutable/CNodeBase.java @@ -0,0 +1,35 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2003-2012, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection.mutable; + + + +import java.util.concurrent.atomic.AtomicIntegerFieldUpdater; + + + +abstract class CNodeBase extends MainNode { + + public static final AtomicIntegerFieldUpdater updater = AtomicIntegerFieldUpdater.newUpdater(CNodeBase.class, "csize"); + + public volatile int csize = -1; + + public boolean CAS_SIZE(int oldval, int nval) { + return updater.compareAndSet(this, oldval, nval); + } + + public void WRITE_SIZE(int nval) { + updater.set(this, nval); + } + + public int READ_SIZE() { + return updater.get(this); + } + +} \ No newline at end of file diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index dbd2129f0c..f208d6555e 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -360,6 +360,11 @@ private[mutable] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INo 
final def isNullInode(ct: Ctrie[K, V]) = GCAS_READ(ct) eq null + final def cachedSize(ct: Ctrie[K, V]): Int = { + val m = GCAS_READ(ct) + m.cachedSize(ct) + } + /* this is a quiescent method! */ def string(lev: Int) = "%sINode -> %s".format(" " * lev, mainnode match { case null => "" @@ -389,6 +394,8 @@ private[mutable] final class FailedNode[K, V](p: MainNode[K, V]) extends MainNod def string(lev: Int) = throw new UnsupportedOperationException + def cachedSize(ct: AnyRef): Int = throw new UnsupportedOperationException + override def toString = "FailedNode(%s)".format(p) } @@ -414,6 +421,7 @@ extends MainNode[K, V] with KVNode[K, V] { final def copyTombed = new TNode(k, v, hc) final def copyUntombed = new SNode(k, v, hc) final def kvPair = (k, v) + final def cachedSize(ct: AnyRef): Int = 1 final def string(lev: Int) = (" " * lev) + "TNode(%s, %s, %x, !)".format(k, v, hc) } @@ -432,12 +440,37 @@ extends MainNode[K, V] { } } def get(k: K) = listmap.get(k) + def cachedSize(ct: AnyRef): Int = listmap.size def string(lev: Int) = (" " * lev) + "LNode(%s)".format(listmap.mkString(", ")) } private[mutable] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen) -extends MainNode[K, V] { +extends CNodeBase[K, V] { + + // this should only be called from within read-only snapshots + final def cachedSize(ct: AnyRef) = { + val currsz = READ_SIZE() + if (currsz != -1) currsz + else { + val sz = computeSize(ct.asInstanceOf[Ctrie[K, V]]) + while (READ_SIZE() == -1) CAS_SIZE(-1, sz) + READ_SIZE() + } + } + + private def computeSize(ct: Ctrie[K, V]): Int = { + var i = 0 + var sz = 0 + while (i < array.length) { + array(i) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] => sz += in.cachedSize(ct) + } + i += 1 + } + sz + } final def updatedAt(pos: Int, nn: BasicNode, gen: Gen) = { val len = array.length @@ -830,6 +863,15 @@ extends ConcurrentMap[K, V] if (nonReadOnly) readOnlySnapshot().iterator else new CtrieIterator(0, this) + private def cachedSize() = { + val r = RDCSS_READ_ROOT() + r.cachedSize(this) + } + + override def size: Int = + if (nonReadOnly) readOnlySnapshot().size + else cachedSize() + override def stringPrefix = "Ctrie" } diff --git a/src/library/scala/collection/mutable/MainNode.java b/src/library/scala/collection/mutable/MainNode.java index 09bc858edc..0578de676d 100644 --- a/src/library/scala/collection/mutable/MainNode.java +++ b/src/library/scala/collection/mutable/MainNode.java @@ -20,6 +20,8 @@ abstract class MainNode extends BasicNode { public volatile MainNode prev = null; + public abstract int cachedSize(Object ct); + public boolean CAS_PREV(MainNode oldval, MainNode nval) { return updater.compareAndSet(this, oldval, nval); } @@ -29,6 +31,8 @@ abstract class MainNode extends BasicNode { } // do we need this? unclear in the javadocs... 
+ // apparently not - volatile reads are supposed to be safe + // irregardless of whether there are concurrent ARFU updates public MainNode READ_PREV() { return updater.get(this); } diff --git a/test/benchmarking/ParCtrie-size.scala b/test/benchmarking/ParCtrie-size.scala new file mode 100644 index 0000000000..5a6191fb62 --- /dev/null +++ b/test/benchmarking/ParCtrie-size.scala @@ -0,0 +1,34 @@ + + + + +import collection.parallel.mutable.ParCtrie + + + +object Size extends testing.Benchmark { + val length = sys.props("length").toInt + val par = sys.props("par").toInt + var parctrie = ParCtrie((0 until length) zip (0 until length): _*) + + collection.parallel.ForkJoinTasks.defaultForkJoinPool.setParallelism(par) + + def run = { + parctrie.size + } + + var iteration = 0 + + override def tearDown() { + iteration += 1 + if (iteration % 4 == 0) parctrie = ParCtrie((0 until length) zip (0 until length): _*) + } + +} + + + + + + + -- cgit v1.2.3 From fe6c9e3f1693f2e6db5ae69517893894bbac6afb Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 15:37:42 +0100 Subject: Add parallel size computation for ParCtrie. Also modified size computation for Ctrie so that concurrent `size` invocations can be parallelized more efficiently. --- .../scala/collection/mutable/BasicNode.java | 2 +- src/library/scala/collection/mutable/Ctrie.scala | 19 ++++++--- .../collection/parallel/mutable/ParCtrie.scala | 47 +++++++++++++++++++++- test/files/run/ctries/concmap.scala | 19 +++++++++ 4 files changed, 78 insertions(+), 9 deletions(-) diff --git a/src/library/scala/collection/mutable/BasicNode.java b/src/library/scala/collection/mutable/BasicNode.java index b934aed24f..c05009470a 100644 --- a/src/library/scala/collection/mutable/BasicNode.java +++ b/src/library/scala/collection/mutable/BasicNode.java @@ -13,7 +13,7 @@ package scala.collection.mutable; -abstract class BasicNode { +public abstract class BasicNode { public abstract String string(int lev); diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index f208d6555e..9a8f4bf276 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -20,7 +20,7 @@ import annotation.switch -private[mutable] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { +private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends INodeBase[K, V](g) { import INodeBase._ WRITE(bn) @@ -405,7 +405,7 @@ private[mutable] trait KVNode[K, V] { } -private[mutable] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class SNode[K, V](final val k: K, final val v: V, final val hc: Int) extends BasicNode with KVNode[K, V] { final def copy = new SNode(k, v, hc) final def copyTombed = new TNode(k, v, hc) @@ -415,7 +415,7 @@ extends BasicNode with KVNode[K, V] { } -private[mutable] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) +private[collection] final class TNode[K, V](final val k: K, final val v: V, final val hc: Int) extends MainNode[K, V] with KVNode[K, V] { final def copy = new TNode(k, v, hc) final def copyTombed = new TNode(k, v, hc) @@ -426,7 +426,7 @@ extends MainNode[K, V] with KVNode[K, V] { } -private[mutable] final class LNode[K, V](final val listmap: ImmutableListMap[K, V]) +private[collection] final class LNode[K, V](final val listmap: ImmutableListMap[K, V]) extends MainNode[K, V] { def this(k: K, v: V) = 
this(ImmutableListMap(k -> v)) def this(k1: K, v1: V, k2: K, v2: V) = this(ImmutableListMap(k1 -> v1, k2 -> v2)) @@ -445,7 +445,7 @@ extends MainNode[K, V] { } -private[mutable] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen) +private[collection] final class CNode[K, V](final val bitmap: Int, final val array: Array[BasicNode], final val gen: Gen) extends CNodeBase[K, V] { // this should only be called from within read-only snapshots @@ -459,11 +459,18 @@ extends CNodeBase[K, V] { } } + // lends itself towards being parallelizable by choosing + // a random starting offset in the array + // => if there are concurrent size computations, they start + // at different positions, so they are more likely to + // to be independent private def computeSize(ct: Ctrie[K, V]): Int = { var i = 0 var sz = 0 + val offset = math.abs(util.Random.nextInt()) % array.length while (i < array.length) { - array(i) match { + val pos = (i + offset) % array.length + array(pos) match { case sn: SNode[_, _] => sz += 1 case in: INode[K, V] => sz += in.cachedSize(ct) } diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala index 37add60df9..1e11b85da5 100644 --- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -13,6 +13,12 @@ package scala.collection.parallel.mutable import scala.collection.generic._ import scala.collection.parallel.Combiner import scala.collection.parallel.IterableSplitter +import scala.collection.mutable.BasicNode +import scala.collection.mutable.TNode +import scala.collection.mutable.LNode +import scala.collection.mutable.CNode +import scala.collection.mutable.SNode +import scala.collection.mutable.INode import scala.collection.mutable.Ctrie import scala.collection.mutable.CtrieIterator @@ -34,6 +40,7 @@ extends ParMap[K, V] with ParCtrieCombiner[K, V] with Serializable { + import collection.parallel.tasksupport._ def this() = this(new Ctrie) @@ -47,8 +54,6 @@ extends ParMap[K, V] def splitter = new ParCtrieSplitter(0, ctrie.readOnlySnapshot().asInstanceOf[Ctrie[K, V]], true) - override def size = ctrie.size - override def clear() = ctrie.clear() def result = this @@ -71,8 +76,46 @@ extends ParMap[K, V] this } + override def size = { + val in = ctrie.RDCSS_READ_ROOT() + val r = in.GCAS_READ(ctrie) + r match { + case tn: TNode[_, _] => tn.cachedSize(ctrie) + case ln: LNode[_, _] => ln.cachedSize(ctrie) + case cn: CNode[_, _] => + executeAndWaitResult(new Size(0, cn.array.length, cn.array)) + cn.cachedSize(ctrie) + } + } + override def stringPrefix = "ParCtrie" + /* tasks */ + + /** Computes Ctrie size in parallel. 
*/ + class Size(offset: Int, howmany: Int, array: Array[BasicNode]) extends Task[Int, Size] { + var result = -1 + def leaf(prev: Option[Int]) = { + var sz = 0 + var i = offset + val until = offset + howmany + while (i < until) { + array(i) match { + case sn: SNode[_, _] => sz += 1 + case in: INode[K, V] => sz += in.cachedSize(ctrie) + } + i += 1 + } + result = sz + } + def split = { + val fp = howmany / 2 + Seq(new Size(offset, fp, array), new Size(offset + fp, howmany - fp, array)) + } + def shouldSplitFurther = howmany > 1 + override def merge(that: Size) = result = result + that.result + } + } diff --git a/test/files/run/ctries/concmap.scala b/test/files/run/ctries/concmap.scala index 85a305ce5b..d73e33182a 100644 --- a/test/files/run/ctries/concmap.scala +++ b/test/files/run/ctries/concmap.scala @@ -164,6 +164,25 @@ object ConcurrentMapSpec extends Spec { for (i <- 0 until sz) assertEqual(ct.get(new Wrap(i)), None) } + "compute size correctly" in { + val ct = new Ctrie[Wrap, Int] + val sz = 36450 + for (i <- 0 until sz) ct(new Wrap(i)) = i + + assertEqual(ct.size, sz) + assertEqual(ct.size, sz) + } + + "compute size correctly in parallel" in { + val ct = new Ctrie[Wrap, Int] + val sz = 36450 + for (i <- 0 until sz) ct(new Wrap(i)) = i + val pct = ct.par + + assertEqual(pct.size, sz) + assertEqual(pct.size, sz) + } + } } -- cgit v1.2.3 From 7c689e365f333d05fc94c890d9579f3721057ea8 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 16:04:04 +0100 Subject: Implement `remaining` method in ParCtrie splitter. --- src/library/scala/collection/parallel/mutable/ParCtrie.scala | 3 +-- 1 file changed, 1 insertion(+), 2 deletions(-) diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala index 1e11b85da5..8cf8b6167e 100644 --- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -124,8 +124,7 @@ extends CtrieIterator[K, V](lev, ct, mustInit) with IterableSplitter[(K, V)] { // only evaluated if `remaining` is invoked (which is not used by most tasks) - //lazy val totalsize = ct.iterator.size /* TODO improve to lazily compute sizes */ - def totalsize: Int = throw new UnsupportedOperationException + lazy val totalsize = ct.par.size var iterated = 0 protected override def newIterator(_lev: Int, _ct: Ctrie[K, V], _mustInit: Boolean) = new ParCtrieSplitter[K, V](_lev, _ct, _mustInit) -- cgit v1.2.3 From e4b5c002b12e17150740283619e12fd6dfab5442 Mon Sep 17 00:00:00 2001 From: Erik Osheim Date: Wed, 15 Feb 2012 10:25:00 -0500 Subject: Improve handling of final and @inline in specialization. Previously, the specialize phase removed FINAL from all specialized methods, but left the @inline annotation alone, causing warnings. This patch does two things: 1. It only removes final from the original class' methods which are overridden, while leaving it on the specialized subclasses' methods. 2. When removing final, it also removes @inline, to prevent spurious warnings. This was intended to fix SI-5005, however there are deeper problems which prevent inlining from working even with this fixed. 
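For illustration only (this snippet is not part of the patch, and the class and method names are hypothetical), the situation addressed here arises in user code of roughly this shape:

    // A @specialized class whose final method is overridden in the generated
    // specialized subclasses (e.g. Vec$mcI$sp). The original method therefore
    // has to lose FINAL -- and, with this change, @inline as well, so the
    // inliner no longer emits spurious warnings about it.
    class Vec[@specialized(Int, Double) T](data: Array[T]) {
      @inline final def first: T = data(0)
    }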
--- .../tools/nsc/transform/SpecializeTypes.scala | 25 ++++++++++++++++++---- 1 file changed, 21 insertions(+), 4 deletions(-) diff --git a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala index 05f5dbc379..8c34a9139d 100644 --- a/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala +++ b/src/compiler/scala/tools/nsc/transform/SpecializeTypes.scala @@ -69,7 +69,7 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { ScalaValueClasses, isValueClass, isScalaValueType, SpecializedClass, RepeatedParamClass, JavaRepeatedParamClass, AnyRefClass, ObjectClass, AnyRefModule, - GroupOfSpecializable, uncheckedVarianceClass + GroupOfSpecializable, uncheckedVarianceClass, ScalaInlineClass } /** TODO - this is a lot of maps. @@ -832,7 +832,6 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { log("-->d SETTING PRIVATE WITHIN TO " + sym.enclosingPackage + " for " + sym) } - sym.resetFlag(FINAL) val specMember = subst(outerEnv)(specializedOverload(owner, sym, spec)) typeEnv(specMember) = typeEnv(sym) ++ outerEnv ++ spec wasSpecializedForTypeVars(specMember) ++= spec collect { case (s, tp) if s.tpe == tp => s } @@ -1733,9 +1732,27 @@ abstract class SpecializeTypes extends InfoTransform with TypingTransformers { class SpecializationTransformer(unit: CompilationUnit) extends Transformer { informProgress("specializing " + unit) - override def transform(tree: Tree) = - if (settings.nospecialization.value) tree + override def transform(tree: Tree) = { + val resultTree = if (settings.nospecialization.value) tree else atPhase(phase.next)(specializeCalls(unit).transform(tree)) + + // Remove the final modifier and @inline annotation from anything in the + // original class (since it's being overridden in at least onesubclass). + // + // We do this here so that the specialized subclasses will correctly copy + // final and @inline. 
+ info.foreach { + case (sym, SpecialOverload(target, _)) => { + sym.resetFlag(FINAL) + target.resetFlag(FINAL) + sym.removeAnnotation(ScalaInlineClass) + target.removeAnnotation(ScalaInlineClass) + } + case _ => {} + } + + resultTree + } } def printSpecStats() { -- cgit v1.2.3 From 9e224c4f66c232cce8bbfa7e9ca25a84222baffd Mon Sep 17 00:00:00 2001 From: Eugene Burmako Date: Wed, 15 Feb 2012 16:07:27 +0100 Subject: Makes multiline interpolation work finely --- .../scala/tools/nsc/ast/parser/Scanners.scala | 8 ++++--- test/files/run/interpolationMultiline1.check | 26 ++++++++++++++++++++++ test/files/run/interpolationMultiline1.flags | 1 + test/files/run/interpolationMultiline1.scala | 26 ++++++++++++++++++++++ test/files/run/interpolationMultiline2.check | 26 ++++++++++++++++++++++ test/files/run/interpolationMultiline2.flags | 1 + test/files/run/interpolationMultiline2.scala | 21 +++++++++++++++++ 7 files changed, 106 insertions(+), 3 deletions(-) create mode 100644 test/files/run/interpolationMultiline1.check create mode 100644 test/files/run/interpolationMultiline1.flags create mode 100644 test/files/run/interpolationMultiline1.scala create mode 100644 test/files/run/interpolationMultiline2.check create mode 100644 test/files/run/interpolationMultiline2.flags create mode 100644 test/files/run/interpolationMultiline2.scala diff --git a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala index dae264fffe..f712c7411f 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/Scanners.scala @@ -180,7 +180,7 @@ trait Scanners extends ScannersCommon { * @pre: inStringInterpolation */ @inline private def inMultiLineInterpolation = - sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART + inStringInterpolation && sepRegions.tail.nonEmpty && sepRegions.tail.head == STRINGPART /** read next token and return last offset */ @@ -217,7 +217,9 @@ trait Scanners extends ScannersCommon { if (!sepRegions.isEmpty && sepRegions.head == lastToken) sepRegions = sepRegions.tail case STRINGLIT => - if (inStringInterpolation) + if (inMultiLineInterpolation) + sepRegions = sepRegions.tail.tail + else if (inStringInterpolation) sepRegions = sepRegions.tail case _ => } @@ -386,7 +388,7 @@ trait Scanners extends ScannersCommon { if (ch == '\"') { nextRawChar() getStringPart(multiLine = true) - sepRegions = STRINGLIT :: sepRegions // indicate string part + sepRegions = STRINGPART :: sepRegions // indicate string part sepRegions = STRINGLIT :: sepRegions // once more to indicate multi line string part } else { token = STRINGLIT diff --git a/test/files/run/interpolationMultiline1.check b/test/files/run/interpolationMultiline1.check new file mode 100644 index 0000000000..09579a800a --- /dev/null +++ b/test/files/run/interpolationMultiline1.check @@ -0,0 +1,26 @@ +Bob is 1 years old +Bob is 1 years old +Bob will be 2 years old +Bob will be 2 years old +1+1 = 2 +1+1 = 2 +Bob is 12 years old +Bob is 12 years old +Bob will be 13 years old +Bob will be 13 years old +12+1 = 13 +12+1 = 13 +Bob is 123 years old +Bob is 123 years old +Bob will be 124 years old +Bob will be 124 years old +123+1 = 124 +123+1 = 124 +Best price: 10.0 +Best price: 10.00 +10.0% discount included +10.00% discount included +Best price: 13.345 +Best price: 13.35 +13.345% discount included +13.35% discount included diff --git a/test/files/run/interpolationMultiline1.flags b/test/files/run/interpolationMultiline1.flags new file mode 
100644 index 0000000000..48fd867160 --- /dev/null +++ b/test/files/run/interpolationMultiline1.flags @@ -0,0 +1 @@ +-Xexperimental diff --git a/test/files/run/interpolationMultiline1.scala b/test/files/run/interpolationMultiline1.scala new file mode 100644 index 0000000000..437aed44b0 --- /dev/null +++ b/test/files/run/interpolationMultiline1.scala @@ -0,0 +1,26 @@ +object Test extends App { + + def test1(n: Int) = { + println(s"""Bob is $n years old""") + println(f"""Bob is $n%2d years old""") + println(s"""Bob will be ${n+1} years old""") + println(f"""Bob will be ${n+1}%2d years old""") + println(s"""$n+1 = ${n+1}""") + println(f"""$n%d+1 = ${n+1}%d""") + } + + def test2(f: Float) = { + println(s"""Best price: $f""") + println(f"""Best price: $f%.2f""") + println(s"""$f% discount included""") + println(f"""$f%3.2f% discount included""") + } + + test1(1) + test1(12) + test1(123) + + test2(10.0f) + test2(13.345f) + +} diff --git a/test/files/run/interpolationMultiline2.check b/test/files/run/interpolationMultiline2.check new file mode 100644 index 0000000000..7584aee9f7 --- /dev/null +++ b/test/files/run/interpolationMultiline2.check @@ -0,0 +1,26 @@ +Bob is 1 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 1 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 1 years old! +Bob is 1%2d years old! +Bob is 1 years old! +Bob is 1%2d years old! +=============== +Bob is 12 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 12 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 12 years old! +Bob is 12%2d years old! +Bob is 12 years old! +Bob is 12%2d years old! +=============== +Bob is 123 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 123 years old! +java.lang.StringIndexOutOfBoundsException: String index out of range: 0 +Bob is 123 years old! +Bob is 123%2d years old! +Bob is 123 years old! +Bob is 123%2d years old! 
\ No newline at end of file diff --git a/test/files/run/interpolationMultiline2.flags b/test/files/run/interpolationMultiline2.flags new file mode 100644 index 0000000000..e1b37447c9 --- /dev/null +++ b/test/files/run/interpolationMultiline2.flags @@ -0,0 +1 @@ +-Xexperimental \ No newline at end of file diff --git a/test/files/run/interpolationMultiline2.scala b/test/files/run/interpolationMultiline2.scala new file mode 100644 index 0000000000..f6a682c3ce --- /dev/null +++ b/test/files/run/interpolationMultiline2.scala @@ -0,0 +1,21 @@ +object Test extends App { + + def test1(n: Int) = { + val old = "old" + try { println(s"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(s"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(f"""Bob is ${s"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(f"""Bob is ${f"$n"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(f"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(f"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(s"""Bob is ${f"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) } + try { println(s"""Bob is ${s"$n%2d"} years ${s"$old"}!""") } catch { case ex => println(ex) } + } + + test1(1) + println("===============") + test1(12) + println("===============") + test1(123) + +} -- cgit v1.2.3 From 910adaa6dd33a1510f4f9c794d91d8c39459e3d8 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 17:20:18 +0100 Subject: Refactor Ctries to eliminate "Cannot inline method" messages. --- src/library/scala/collection/mutable/Ctrie.scala | 26 +++++++++++++--------- .../collection/parallel/mutable/ParCtrie.scala | 4 ++-- 2 files changed, 17 insertions(+), 13 deletions(-) diff --git a/src/library/scala/collection/mutable/Ctrie.scala b/src/library/scala/collection/mutable/Ctrie.scala index 9a8f4bf276..699b96b87c 100644 --- a/src/library/scala/collection/mutable/Ctrie.scala +++ b/src/library/scala/collection/mutable/Ctrie.scala @@ -31,6 +31,8 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends @inline final def CAS(old: MainNode[K, V], n: MainNode[K, V]) = INodeBase.updater.compareAndSet(this, old, n) + final def gcasRead(ct: Ctrie[K, V]): MainNode[K, V] = GCAS_READ(ct) + @inline final def GCAS_READ(ct: Ctrie[K, V]): MainNode[K, V] = { val m = /*READ*/mainnode val prevval = /*READ*/m.prev @@ -41,7 +43,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends @tailrec private def GCAS_Complete(m: MainNode[K, V], ct: Ctrie[K, V]): MainNode[K, V] = if (m eq null) null else { // complete the GCAS val prev = /*READ*/m.prev - val ctr = ct.RDCSS_READ_ROOT(true) + val ctr = ct.readRoot(true) prev match { case null => @@ -84,7 +86,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends nin } - @inline final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = { + final def copyToGen(ngen: Gen, ct: Ctrie[K, V]) = { val nin = new INode[K, V](ngen) val main = GCAS_READ(ct) nin.WRITE(main) @@ -317,7 +319,7 @@ private[collection] final class INode[K, V](bn: MainNode[K, V], g: Gen) extends case tn: TNode[K, V] => val ncn = cn.updatedAt(pos, tn.copyUntombed, gen).toContracted(lev - 5) if (!parent.GCAS(cn, ncn, ct)) - if (ct.RDCSS_READ_ROOT().gen == startgen) cleanParent(nonlive) + if (ct.readRoot().gen == startgen) cleanParent(nonlive) } } case _ => 
// parent is no longer a cnode, we're done @@ -549,7 +551,7 @@ extends CNodeBase[K, V] { val sub = arr(i) sub match { case in: INode[K, V] => - val inodemain = in.GCAS_READ(ct) + val inodemain = in.gcasRead(ct) assert(inodemain ne null) tmparray(i) = resurrect(in, inodemain) case sn: SNode[K, V] => @@ -670,6 +672,8 @@ extends ConcurrentMap[K, V] @inline final def CAS_ROOT(ov: AnyRef, nv: AnyRef) = rootupdater.compareAndSet(this, ov, nv) + final def readRoot(abort: Boolean = false): INode[K, V] = RDCSS_READ_ROOT(abort) + @inline final def RDCSS_READ_ROOT(abort: Boolean = false): INode[K, V] = { val r = /*READ*/root r match { @@ -688,7 +692,7 @@ extends ConcurrentMap[K, V] if (CAS_ROOT(desc, ov)) ov else RDCSS_Complete(abort) } else { - val oldmain = ov.GCAS_READ(this) + val oldmain = ov.gcasRead(this) if (oldmain eq exp) { if (CAS_ROOT(desc, nv)) { desc.committed = true @@ -760,9 +764,9 @@ extends ConcurrentMap[K, V] override def empty: Ctrie[K, V] = new Ctrie[K, V] - @inline final def isReadOnly = rootupdater eq null + final def isReadOnly = rootupdater eq null - @inline final def nonReadOnly = rootupdater ne null + final def nonReadOnly = rootupdater ne null /** Returns a snapshot of this Ctrie. * This operation is lock-free and linearizable. @@ -775,7 +779,7 @@ extends ConcurrentMap[K, V] */ @tailrec final def snapshot(): Ctrie[K, V] = { val r = RDCSS_READ_ROOT() - val expmain = r.GCAS_READ(this) + val expmain = r.gcasRead(this) if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r.copyToGen(new Gen, this), rootupdater) else snapshot() } @@ -794,14 +798,14 @@ extends ConcurrentMap[K, V] */ @tailrec final def readOnlySnapshot(): collection.Map[K, V] = { val r = RDCSS_READ_ROOT() - val expmain = r.GCAS_READ(this) + val expmain = r.gcasRead(this) if (RDCSS_ROOT(r, expmain, r.copyToGen(new Gen, this))) new Ctrie(r, null) else readOnlySnapshot() } @tailrec final override def clear() { val r = RDCSS_READ_ROOT() - if (!RDCSS_ROOT(r, r.GCAS_READ(this), INode.newRootNode[K, V])) clear() + if (!RDCSS_ROOT(r, r.gcasRead(this), INode.newRootNode[K, V])) clear() } final def lookup(k: K): V = { @@ -924,7 +928,7 @@ private[collection] class CtrieIterator[K, V](var level: Int, private var ct: Ct r } else Iterator.empty.next() - private def readin(in: INode[K, V]) = in.GCAS_READ(ct) match { + private def readin(in: INode[K, V]) = in.gcasRead(ct) match { case cn: CNode[K, V] => depth += 1 stack(depth) = cn.array diff --git a/src/library/scala/collection/parallel/mutable/ParCtrie.scala b/src/library/scala/collection/parallel/mutable/ParCtrie.scala index 8cf8b6167e..cec2e6886d 100644 --- a/src/library/scala/collection/parallel/mutable/ParCtrie.scala +++ b/src/library/scala/collection/parallel/mutable/ParCtrie.scala @@ -77,8 +77,8 @@ extends ParMap[K, V] } override def size = { - val in = ctrie.RDCSS_READ_ROOT() - val r = in.GCAS_READ(ctrie) + val in = ctrie.readRoot() + val r = in.gcasRead(ctrie) r match { case tn: TNode[_, _] => tn.cachedSize(ctrie) case ln: LNode[_, _] => ln.cachedSize(ctrie) -- cgit v1.2.3 From 0d315d652ae34e1c4cf1098cc753a43d4862e362 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 15 Feb 2012 10:24:55 -0800 Subject: Restored scala.reflect.Code. There's a deprecation process to follow before removing stuff. scala.reflect.Code isn't even deprecated in 2.9.1. 
--- src/library/scala/reflect/Code.scala | 24 ++++++++++++++++++++++++ 1 file changed, 24 insertions(+) create mode 100644 src/library/scala/reflect/Code.scala diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala new file mode 100644 index 0000000000..5010a0d614 --- /dev/null +++ b/src/library/scala/reflect/Code.scala @@ -0,0 +1,24 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + + +package scala.reflect + +/** This type is required by the compiler and should not be used in client code. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10.0") +class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) { + val manifest = implicitly[Manifest[T]] + override def toString = "Code(tree = "+tree+", manifest = "+manifest+")" +} + +/** This type is required by the compiler and should not be used in client code. */ +@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10.0") +object Code { + def lift[A](tree: A): Code[A] = + throw new Error("Code was not lifted by compiler") +} -- cgit v1.2.3 From f0f5ad3c81431eba27d590f80872306f60d01505 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 15 Feb 2012 20:50:25 +0100 Subject: Apply the fix for si-5293 to hash maps. This fix was previously only applied to hash sets. --- .../scala/collection/mutable/HashTable.scala | 32 ++++++-- .../collection/parallel/mutable/ParHashMap.scala | 10 ++- test/files/jvm/serialization.check | 8 +- test/files/run/t5293-map.scala | 88 ++++++++++++++++++++++ 4 files changed, 122 insertions(+), 16 deletions(-) create mode 100644 test/files/run/t5293-map.scala diff --git a/src/library/scala/collection/mutable/HashTable.scala b/src/library/scala/collection/mutable/HashTable.scala index cdf1b78f29..5b3e07b826 100644 --- a/src/library/scala/collection/mutable/HashTable.scala +++ b/src/library/scala/collection/mutable/HashTable.scala @@ -52,6 +52,10 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU */ @transient protected var sizemap: Array[Int] = null + @transient var seedvalue: Int = tableSizeSeed + + protected def tableSizeSeed = Integer.bitCount(table.length - 1) + protected def initialSize: Int = HashTable.initialSize private def lastPopulatedIndex = { @@ -70,14 +74,16 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU private[collection] def init[B](in: java.io.ObjectInputStream, f: (A, B) => Entry) { in.defaultReadObject - _loadFactor = in.readInt + _loadFactor = in.readInt() assert(_loadFactor > 0) - val size = in.readInt + val size = in.readInt() tableSize = 0 assert(size >= 0) - - val smDefined = in.readBoolean + + seedvalue = in.readInt() + + val smDefined = in.readBoolean() table = new Array(capacity(sizeForThreshold(_loadFactor, size))) threshold = newThreshold(_loadFactor, table.size) @@ -86,7 +92,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU var index = 0 while (index < size) { - addEntry(f(in.readObject.asInstanceOf[A], in.readObject.asInstanceOf[B])) + addEntry(f(in.readObject().asInstanceOf[A], in.readObject().asInstanceOf[B])) index += 1 } } @@ -103,6 +109,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU out.defaultWriteObject out.writeInt(_loadFactor) 
out.writeInt(tableSize) + out.writeInt(seedvalue) out.writeBoolean(isSizeMapDefined) foreachEntry { entry => out.writeObject(entry.key) @@ -314,7 +321,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU // this is of crucial importance when populating the table in parallel protected final def index(hcode: Int) = { val ones = table.length - 1 - val improved = improve(hcode) + val improved = improve(hcode, seedvalue) val shifted = (improved >> (32 - java.lang.Integer.bitCount(ones))) & ones shifted } @@ -325,6 +332,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU table = c.table tableSize = c.tableSize threshold = c.threshold + seedvalue = c.seedvalue sizemap = c.sizemap } if (alwaysInitSizeMap && sizemap == null) sizeMapInitAndRebuild @@ -335,6 +343,7 @@ trait HashTable[A, Entry >: Null <: HashEntry[A, Entry]] extends HashTable.HashU table, tableSize, threshold, + seedvalue, sizemap ) } @@ -368,7 +377,7 @@ private[collection] object HashTable { protected def elemHashCode(key: KeyType) = key.## - protected final def improve(hcode: Int) = { + protected final def improve(hcode: Int, seed: Int) = { /* Murmur hash * m = 0x5bd1e995 * r = 24 @@ -396,7 +405,7 @@ private[collection] object HashTable { * */ var i = hcode * 0x9e3775cd i = java.lang.Integer.reverseBytes(i) - i * 0x9e3775cd + i = i * 0x9e3775cd // a slower alternative for byte reversal: // i = (i << 16) | (i >> 16) // i = ((i >> 8) & 0x00ff00ff) | ((i << 8) & 0xff00ff00) @@ -420,6 +429,11 @@ private[collection] object HashTable { // h = h ^ (h >>> 14) // h = h + (h << 4) // h ^ (h >>> 10) + + // the rest of the computation is due to SI-5293 + val rotation = seed % 32 + val rotated = (i >>> rotation) | (i << (32 - rotation)) + rotated } } @@ -442,6 +456,7 @@ private[collection] object HashTable { val table: Array[HashEntry[A, Entry]], val tableSize: Int, val threshold: Int, + val seedvalue: Int, val sizemap: Array[Int] ) { import collection.DebugUtils._ @@ -452,6 +467,7 @@ private[collection] object HashTable { append("Table: [" + arrayString(table, 0, table.length) + "]") append("Table size: " + tableSize) append("Load factor: " + loadFactor) + append("Seedvalue: " + seedvalue) append("Threshold: " + threshold) append("Sizemap: [" + arrayString(sizemap, 0, sizemap.length) + "]") } diff --git a/src/library/scala/collection/parallel/mutable/ParHashMap.scala b/src/library/scala/collection/parallel/mutable/ParHashMap.scala index 15ffd3fdd2..21a5b05749 100644 --- a/src/library/scala/collection/parallel/mutable/ParHashMap.scala +++ b/src/library/scala/collection/parallel/mutable/ParHashMap.scala @@ -160,10 +160,11 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr import collection.parallel.tasksupport._ private var mask = ParHashMapCombiner.discriminantmask private var nonmasklen = ParHashMapCombiner.nonmasklength + private var seedvalue = 27 def +=(elem: (K, V)) = { sz += 1 - val hc = improve(elemHashCode(elem._1)) + val hc = improve(elemHashCode(elem._1), seedvalue) val pos = (hc >>> nonmasklen) if (buckets(pos) eq null) { // initialize bucket @@ -176,7 +177,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr def result: ParHashMap[K, V] = if (size >= (ParHashMapCombiner.numblocks * sizeMapBucketSize)) { // 1024 // construct table - val table = new AddingHashTable(size, tableLoadFactor) + val table = new AddingHashTable(size, tableLoadFactor, seedvalue) val bucks = buckets.map(b => if (b ne null) 
b.headPtr else null) val insertcount = executeAndWaitResult(new FillBlocks(bucks, table, 0, bucks.length)) table.setSize(insertcount) @@ -210,11 +211,12 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr * and true if the key was successfully inserted. It does not update the number of elements * in the table. */ - private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int) extends HashTable[K, DefaultEntry[K, V]] { + private[ParHashMapCombiner] class AddingHashTable(numelems: Int, lf: Int, _seedvalue: Int) extends HashTable[K, DefaultEntry[K, V]] { import HashTable._ _loadFactor = lf table = new Array[HashEntry[K, DefaultEntry[K, V]]](capacity(sizeForThreshold(_loadFactor, numelems))) tableSize = 0 + seedvalue = _seedvalue threshold = newThreshold(_loadFactor, table.length) sizeMapInit(table.length) def setSize(sz: Int) = tableSize = sz @@ -285,7 +287,7 @@ extends collection.parallel.BucketCombiner[(K, V), ParHashMap[K, V], DefaultEntr insertcount } private def assertCorrectBlock(block: Int, k: K) { - val hc = improve(elemHashCode(k)) + val hc = improve(elemHashCode(k), seedvalue) if ((hc >>> nonmasklen) != block) { println(hc + " goes to " + (hc >>> nonmasklen) + ", while expected block is " + block) assert((hc >>> nonmasklen) == block) diff --git a/test/files/jvm/serialization.check b/test/files/jvm/serialization.check index 67b77639a2..81b68f0f5d 100644 --- a/test/files/jvm/serialization.check +++ b/test/files/jvm/serialization.check @@ -156,8 +156,8 @@ x = BitSet(0, 8, 9) y = BitSet(0, 8, 9) x equals y: true, y equals x: true -x = Map(C -> 3, B -> 2, A -> 1) -y = Map(C -> 3, A -> 1, B -> 2) +x = Map(A -> 1, C -> 3, B -> 2) +y = Map(A -> 1, C -> 3, B -> 2) x equals y: true, y equals x: true x = Set(buffers, title, layers) @@ -283,8 +283,8 @@ x = ParArray(abc, def, etc) y = ParArray(abc, def, etc) x equals y: true, y equals x: true -x = ParHashMap(1 -> 2, 2 -> 4) -y = ParHashMap(1 -> 2, 2 -> 4) +x = ParHashMap(2 -> 4, 1 -> 2) +y = ParHashMap(2 -> 4, 1 -> 2) x equals y: true, y equals x: true x = ParCtrie(1 -> 2, 2 -> 4) diff --git a/test/files/run/t5293-map.scala b/test/files/run/t5293-map.scala new file mode 100644 index 0000000000..9e186894fc --- /dev/null +++ b/test/files/run/t5293-map.scala @@ -0,0 +1,88 @@ + + + +import scala.collection.JavaConverters._ + + + +object Test extends App { + + def bench(label: String)(body: => Unit): Long = { + val start = System.nanoTime + + 0.until(10).foreach(_ => body) + + val end = System.nanoTime + + //println("%s: %s ms".format(label, (end - start) / 1000.0 / 1000.0)) + + end - start + } + + def benchJava(values: java.util.Map[Int, Int]) = { + bench("Java Map") { + val m = new java.util.HashMap[Int, Int] + + m.putAll(values) + } + } + + def benchScala(values: Iterable[(Int, Int)]) = { + bench("Scala Map") { + val m = new scala.collection.mutable.HashMap[Int, Int] + + m ++= values + } + } + + def benchScalaSorted(values: Iterable[(Int, Int)]) = { + bench("Scala Map sorted") { + val m = new scala.collection.mutable.HashMap[Int, Int] + + m ++= values.toArray.sorted + } + } + + def benchScalaPar(values: Iterable[(Int, Int)]) = { + bench("Scala ParMap") { + val m = new scala.collection.parallel.mutable.ParHashMap[Int, Int] map { x => x } + + m ++= values + } + } + + val total = 50000 + val values = (0 until total) zip (0 until total) + val map = scala.collection.mutable.HashMap.empty[Int, Int] + + map ++= values + + // warmup + for (x <- 0 until 5) { + benchJava(map.asJava) + benchScala(map) + 
benchScalaPar(map) + benchJava(map.asJava) + benchScala(map) + benchScalaPar(map) + } + + val javamap = benchJava(map.asJava) + val scalamap = benchScala(map) + val scalaparmap = benchScalaPar(map) + + // println(javamap) + // println(scalamap) + // println(scalaparmap) + + assert(scalamap < (javamap * 4)) + assert(scalaparmap < (javamap * 4)) +} + + + + + + + + -- cgit v1.2.3 From a29173afd1e1b4d840a2badaa36ac97f60225044 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 15 Feb 2012 11:55:38 -0800 Subject: Revert "Restored scala.reflect.Code." This reverts commit 0d315d652ae34e1c4cf1098cc753a43d4862e362. I forgot Code had a big warning label saying "don't use me." --- src/library/scala/reflect/Code.scala | 24 ------------------------ 1 file changed, 24 deletions(-) delete mode 100644 src/library/scala/reflect/Code.scala diff --git a/src/library/scala/reflect/Code.scala b/src/library/scala/reflect/Code.scala deleted file mode 100644 index 5010a0d614..0000000000 --- a/src/library/scala/reflect/Code.scala +++ /dev/null @@ -1,24 +0,0 @@ -/* __ *\ -** ________ ___ / / ___ Scala API ** -** / __/ __// _ | / / / _ | (c) 2002-2011, LAMP/EPFL ** -** __\ \/ /__/ __ |/ /__/ __ | http://scala-lang.org/ ** -** /____/\___/_/ |_/____/_/ | | ** -** |/ ** -\* */ - - -package scala.reflect - -/** This type is required by the compiler and should not be used in client code. */ -@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10.0") -class Code[T: Manifest](val tree: scala.reflect.mirror.Tree) { - val manifest = implicitly[Manifest[T]] - override def toString = "Code(tree = "+tree+", manifest = "+manifest+")" -} - -/** This type is required by the compiler and should not be used in client code. */ -@deprecated("Replaced with scala.reflect.macro.Context#reify, will be completely removed soon", "2.10.0") -object Code { - def lift[A](tree: A): Code[A] = - throw new Error("Code was not lifted by compiler") -} -- cgit v1.2.3 From 9655fafbc89b650b92f239aa7f69df7a16e3542b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 15 Feb 2012 12:14:43 -0800 Subject: Revert "Make fix for SI-5452 not break other things." This reverts commit a725bf982c06e16c5d533ea6b2227b726db4f7e4. 
--- src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala | 4 +++- src/compiler/scala/tools/nsc/typechecker/Infer.scala | 7 +------ test/files/neg/t5452.check | 4 +--- 3 files changed, 5 insertions(+), 10 deletions(-) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 6ee09d064f..466b5125a8 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -714,9 +714,11 @@ trait ContextErrors { setError(tree) } - def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = { issueNormalTypeError(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) + setError(tree) + } def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, argtpes: List[Type], pt: Type) = { diff --git a/src/compiler/scala/tools/nsc/typechecker/Infer.scala b/src/compiler/scala/tools/nsc/typechecker/Infer.scala index acf905d974..b97fbebec2 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Infer.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Infer.scala @@ -1466,9 +1466,7 @@ trait Infer { argtpes: List[Type], pt0: Type, varArgsOnly: Boolean = false): Unit = tree.tpe match { case OverloadedType(pre, alts) => val pt = if (pt0.typeSymbol == UnitClass) WildcardType else pt0 - var secondTry = true tryTwice { - secondTry = !secondTry debuglog("infer method alt "+ tree.symbol +" with alternatives "+ (alts map pre.memberType) +", argtpes = "+ argtpes +", pt = "+ pt) @@ -1490,11 +1488,8 @@ trait Infer { if (improves(alt, best)) alt else best) val competing = applicable.dropWhile(alt => best == alt || improves(best, alt)) if (best == NoSymbol) { - if (pt == WildcardType) { + if (pt == WildcardType) NoBestMethodAlternativeError(tree, argtpes, pt) - if (secondTry) - setError(tree) - } else inferMethodAlternative(tree, undetparams, argtpes, WildcardType) } else if (!competing.isEmpty) { diff --git a/test/files/neg/t5452.check b/test/files/neg/t5452.check index 2f35a45509..baf544499b 100644 --- a/test/files/neg/t5452.check +++ b/test/files/neg/t5452.check @@ -1,7 +1,5 @@ t5452.scala:28: error: overloaded method value apply with alternatives: - ()Queryable[CoffeesTable] - (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing - (implicit evidence$1: Manifest[CoffeesTable])Nothing + cannot be applied to (Queryable[CoffeesTable]) Queryable[CoffeesTable]( q.treeFilter(null) ) ^ -- cgit v1.2.3 From 6543b2d983a4e44ad9bd2ea036d53ce52c37284c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 15 Feb 2012 12:14:49 -0800 Subject: Revert "Fix for SI-5452." This reverts commit 2b731911e97a281e324060099631e2374b2144ec. 
--- .../tools/nsc/typechecker/ContextErrors.scala | 4 +-- test/files/neg/t5452.check | 6 ----- test/files/neg/t5452.scala | 29 ---------------------- 3 files changed, 1 insertion(+), 38 deletions(-) delete mode 100644 test/files/neg/t5452.check delete mode 100644 test/files/neg/t5452.scala diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index 466b5125a8..6ee09d064f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -714,11 +714,9 @@ trait ContextErrors { setError(tree) } - def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = { + def NoBestMethodAlternativeError(tree: Tree, argtpes: List[Type], pt: Type) = issueNormalTypeError(tree, applyErrorMsg(tree, " cannot be applied to ", argtpes, pt)) - setError(tree) - } def AmbiguousMethodAlternativeError(tree: Tree, pre: Type, best: Symbol, firstCompeting: Symbol, argtpes: List[Type], pt: Type) = { diff --git a/test/files/neg/t5452.check b/test/files/neg/t5452.check deleted file mode 100644 index baf544499b..0000000000 --- a/test/files/neg/t5452.check +++ /dev/null @@ -1,6 +0,0 @@ -t5452.scala:28: error: overloaded method value apply with alternatives: - - cannot be applied to (Queryable[CoffeesTable]) - Queryable[CoffeesTable]( q.treeFilter(null) ) - ^ -one error found diff --git a/test/files/neg/t5452.scala b/test/files/neg/t5452.scala deleted file mode 100644 index 1032db7a4b..0000000000 --- a/test/files/neg/t5452.scala +++ /dev/null @@ -1,29 +0,0 @@ -// /scala/trac/5452/a.scala -// Mon Feb 13 22:52:36 PST 2012 - -// import scala.reflect.mirror._ - -trait Tree - -object Bip { - def ??? = sys.error("") -} -import Bip._ - -case class Queryable[T]() { - def treeFilter( t:Tree ) : Queryable[T] = ??? -} - -object Queryable { - def apply[T:Manifest] = ??? - def apply[T:Manifest]( t:Tree ) = ??? -} - -trait CoffeesTable{ - def sales : Int -} - -object Test extends App{ - val q = new Queryable[CoffeesTable] - Queryable[CoffeesTable]( q.treeFilter(null) ) -} -- cgit v1.2.3 From 32646c3b84ecb4ae7946c3ea6f74eaade38bc4e0 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 15 Feb 2012 12:15:38 -0800 Subject: Test cases which motivated preceding reversions. See t5452 for disabled/neg/t5452 and disabled/pos/spurious-overload I give up, review by @hubertp. 
--- test/disabled/neg/t5452.check | 8 ++++++++ test/disabled/neg/t5452.scala | 29 ++++++++++++++++++++++++++++ test/disabled/pos/spurious-overload.scala | 32 +++++++++++++++++++++++++++++++ 3 files changed, 69 insertions(+) create mode 100644 test/disabled/neg/t5452.check create mode 100644 test/disabled/neg/t5452.scala create mode 100644 test/disabled/pos/spurious-overload.scala diff --git a/test/disabled/neg/t5452.check b/test/disabled/neg/t5452.check new file mode 100644 index 0000000000..2f35a45509 --- /dev/null +++ b/test/disabled/neg/t5452.check @@ -0,0 +1,8 @@ +t5452.scala:28: error: overloaded method value apply with alternatives: + ()Queryable[CoffeesTable] + (t: Tree)(implicit evidence$2: Manifest[CoffeesTable])Nothing + (implicit evidence$1: Manifest[CoffeesTable])Nothing + cannot be applied to (Queryable[CoffeesTable]) + Queryable[CoffeesTable]( q.treeFilter(null) ) + ^ +one error found diff --git a/test/disabled/neg/t5452.scala b/test/disabled/neg/t5452.scala new file mode 100644 index 0000000000..1032db7a4b --- /dev/null +++ b/test/disabled/neg/t5452.scala @@ -0,0 +1,29 @@ +// /scala/trac/5452/a.scala +// Mon Feb 13 22:52:36 PST 2012 + +// import scala.reflect.mirror._ + +trait Tree + +object Bip { + def ??? = sys.error("") +} +import Bip._ + +case class Queryable[T]() { + def treeFilter( t:Tree ) : Queryable[T] = ??? +} + +object Queryable { + def apply[T:Manifest] = ??? + def apply[T:Manifest]( t:Tree ) = ??? +} + +trait CoffeesTable{ + def sales : Int +} + +object Test extends App{ + val q = new Queryable[CoffeesTable] + Queryable[CoffeesTable]( q.treeFilter(null) ) +} diff --git a/test/disabled/pos/spurious-overload.scala b/test/disabled/pos/spurious-overload.scala new file mode 100644 index 0000000000..9767a44eee --- /dev/null +++ b/test/disabled/pos/spurious-overload.scala @@ -0,0 +1,32 @@ +object Test extends App { + def foo(bar: Any) = bar + + val code = foo{ + object lazyLib { + + def delay[A](value: => A): Susp[A] = new SuspImpl[A](value) + + implicit def force[A](s: Susp[A]): A = s() + + abstract class Susp[+A] extends Function0[A] + + class SuspImpl[A](lazyValue: => A) extends Susp[A] { + private var maybeValue: Option[A] = None + + override def apply() = maybeValue match { + case None => + val value = lazyValue + maybeValue = Some(value) + value + case Some(value) => + value + } + } + } + + import lazyLib._ + + val s: Susp[Int] = delay { println("evaluating..."); 3 } + println("2 + s = " + (2 + s)) // implicit call to force() + } +} \ No newline at end of file -- cgit v1.2.3 From 883b7442b90eb2b1184e9d33cb511a24c507fdaf Mon Sep 17 00:00:00 2001 From: Szabolcs Berecz Date: Tue, 31 Jan 2012 21:53:55 +0100 Subject: test to check for proper synchronization in generated code --- test/files/run/synchronized.check | 128 +++++++++++ test/files/run/synchronized.flags | 1 + test/files/run/synchronized.scala | 449 ++++++++++++++++++++++++++++++++++++++ 3 files changed, 578 insertions(+) create mode 100644 test/files/run/synchronized.check create mode 100644 test/files/run/synchronized.flags create mode 100644 test/files/run/synchronized.scala diff --git a/test/files/run/synchronized.check b/test/files/run/synchronized.check new file mode 100644 index 0000000000..dd9f4ef424 --- /dev/null +++ b/test/files/run/synchronized.check @@ -0,0 +1,128 @@ + .|. c1.f1: OK + .|. c1.fi: OK + .|... c1.fv: OK + .|... c1.ff: OK + .|. c1.fl: OK + .|... c1.fo: OK + |.. c1.g1: OK + |.. c1.gi: OK + |.... c1.gv: OK + |..... c1.gf: OK + .|... c1.c.f1: OK + .|... c1.c.fi: OK + .|..... 
c1.c.fv: OK + .|..... c1.c.ff: OK + .|... c1.c.fl: OK + .|..... c1.c.fo: OK + .|... c1.c.fn: OK + |.... c1.c.g1: OK + |.... c1.c.gi: OK + |...... c1.c.gv: OK + |...... c1.c.gf: OK + .|... c1.O.f1: OK + .|... c1.O.fi: OK + .|..... c1.O.fv: OK + .|..... c1.O.ff: OK + .|... c1.O.fl: OK + .|..... c1.O.fo: OK + .|... c1.O.fn: OK + |.... c1.O.g1: OK + |.... c1.O.gi: OK + |...... c1.O.gv: OK + |...... c1.O.gf: OK + .|. O1.f1: OK + .|. O1.fi: OK + .|... O1.fv: OK + .|... O1.ff: OK + .|. O1.fl: OK + .|... O1.fo: OK + |.. O1.g1: OK + |.. O1.gi: OK + |.... O1.gv: OK + |.... O1.gf: OK + .|... O1.c.f1: OK + .|... O1.c.fi: OK + .|..... O1.c.fv: OK + .|..... O1.c.ff: OK + .|... O1.c.fl: OK + .|..... O1.c.fo: OK + .|... O1.c.fn: OK + |.... O1.c.g1: OK + |.... O1.c.gi: OK + |...... O1.c.gv: OK + |...... O1.c.gf: OK + .|... O1.O.f1: OK + .|... O1.O.fi: OK + .|..... O1.O.fv: OK + .|..... O1.O.ff: OK + .|... O1.O.fl: OK + .|..... O1.O.fo: OK + .|... O1.O.fn: OK + |.... O1.O.g1: OK + |.... O1.O.gi: OK + |...... O1.O.gv: OK + |...... O1.O.gf: OK + .|..... c2.f1: OK + .|..... c2.fi: OK + .|....... c2.fv: OK + .|....... c2.ff: OK + .|..... c2.fl: OK + .|....... c2.fo: OK + |....... c2.g1: OK + |....... c2.gi: OK + |......... c2.gv: OK + |......... c2.gf: OK + .|........ c2.c.f1: OK + .|........ c2.c.fi: OK + .|.......... c2.c.fv: OK + .|.......... c2.c.ff: OK + .|........ c2.c.fl: OK + .|.......... c2.c.fo: OK + .|....... c2.c.fn: OK + |......... c2.c.g1: OK + |......... c2.c.gi: OK + |........... c2.c.gv: OK + |........... c2.c.gf: OK + .|........ c2.O.f1: OK + .|........ c2.O.fi: OK + .|.......... c2.O.fv: OK + .|.......... c2.O.ff: OK + .|........ c2.O.fl: OK + .|.......... c2.O.fo: OK + .|....... c2.O.fn: OK + |......... c2.O.g1: OK + |......... c2.O.gi: OK + |........... c2.O.gv: OK + |........... c2.O.gf: OK + .|..... O2.f1: OK + .|..... O2.fi: OK + .|....... O2.fv: OK + .|....... O2.ff: OK + .|..... O2.fl: OK + .|....... O2.fo: OK + |....... O2.g1: OK + |....... O2.gi: OK + |......... O2.gv: OK + |......... O2.gf: OK + .|........ O2.c.f1: OK + .|........ O2.c.fi: OK + .|.......... O2.c.fv: OK + .|.......... O2.c.ff: OK + .|........ O2.c.fl: OK + .|.......... O2.c.fo: OK + .|....... O2.c.fn: OK + |......... O2.c.g1: OK + |......... O2.c.gi: OK + |........... O2.c.gv: OK + |........... O2.c.gf: OK + .|........ O2.O.f1: OK + .|........ O2.O.fi: OK + .|.......... O2.O.fv: OK + .|.......... O2.O.ff: OK + .|........ O2.O.fl: OK + .|.......... O2.O.fo: OK + .|....... O2.O.fn: OK + |......... O2.O.g1: OK + |......... O2.O.gi: OK + |........... O2.O.gv: OK + |........... O2.O.gf: OK diff --git a/test/files/run/synchronized.flags b/test/files/run/synchronized.flags new file mode 100644 index 0000000000..1182725e86 --- /dev/null +++ b/test/files/run/synchronized.flags @@ -0,0 +1 @@ +-optimize \ No newline at end of file diff --git a/test/files/run/synchronized.scala b/test/files/run/synchronized.scala new file mode 100644 index 0000000000..1f0e32992b --- /dev/null +++ b/test/files/run/synchronized.scala @@ -0,0 +1,449 @@ +import java.lang.Thread.holdsLock +import scala.collection.mutable.StringBuilder + +object Util { + def checkLocks(held: AnyRef*)(notHeld: AnyRef*) = { + val sb = new StringBuilder + for (lock <- held) { + sb.append(if (holdsLock(lock)) '.' else '!') + } + print("%5s|" format sb) + + sb.clear() + for (lock <- notHeld) { + sb.append(if (holdsLock(lock)) '!' 
else '.') + } + print("%-15s " format sb) + + (held forall holdsLock) && !(notHeld exists holdsLock) + } +} + +class C1 { + import Util._ + + val lock = new AnyRef + + def f1 = synchronized { checkLocks(this)(this.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass) + @inline final def gi = checkLocks()(this, this.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(C1.this, gfv, gfv.getClass, lock, lock.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass) + glv + } + + class C { + def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass, fv, fv.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, C1.this, C1.this.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) } + def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, C1.this, C1.this.getClass, gv, gv.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, C1.this, C1.this.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + glv + } + } + val c = new C + + object O { + def f1 = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, C1.this, C1.this.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, C1.this, C1.this.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, C1.this, C1.this.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, C1.this, C1.this.getClass) } + def fn = C1.this.synchronized { checkLocks(C1.this)(C1.this.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + @inline final def gi = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, C1.this, 
C1.this.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, C1.this, C1.this.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, C1.this, C1.this.getClass) + glv + } + } +} + +object O1 { + import Util._ + + val lock = new AnyRef + + def f1 = synchronized { checkLocks(this)(this.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass) + @inline final def gi = checkLocks()(this, this.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass) + glv + } + + class C { + def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, O1, O1.getClass, fv, fv.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, O1, O1.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) } + def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass, O1, O1.getClass) + @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, O1, O1.getClass, gv, gv.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, O1, O1.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass) + glv + } + } + val c = new C + + object O { + def f1 = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, O1, O1.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, O1, O1.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, O1, O1.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, O1, O1.getClass) } + def fn = O1.synchronized { checkLocks(O1)(O1.getClass, this, this.getClass) } + + def g1 = checkLocks()(this, this.getClass, O1, O1.getClass) + @inline final def gi = checkLocks()(this, this.getClass, O1, O1.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, O1, O1.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => 
checkLocks()(lock, lock.getClass, gfv, gfv.getClass, O1, O1.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, O1, O1.getClass) + glv + } + } +} + +trait T { + import Util._ + + val Tclass = Class.forName("T$class") + + val lock = new AnyRef + + def f1 = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + + def g1 = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + @inline final def gi = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) + glv + } + + class C { + def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, fv, fv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(ffv, ffv.getClass, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + + def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, T.this, T.this.getClass, gv, gv.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(gfv, gfv.getClass, lock, lock.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + gfv(this) + } + def gl = { + lazy val glv = 
checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + glv + } + } + val c = new C + + object O { + def f1 = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + @inline final def fi = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + val fv: () => Boolean = () => synchronized { checkLocks(this)(this.getClass, fv, fv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + def ff = { + lazy val ffv: AnyRef => Boolean = lock => synchronized { checkLocks(lock)(lock.getClass, ffv, ffv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + ffv(this) + } + def fl = { + lazy val flv = synchronized { checkLocks(this)(this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + flv + } + def fo = lock.synchronized { checkLocks(lock)(lock.getClass, this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) } + def fn = T.this.synchronized { checkLocks(T.this)(T.this.getClass, this, this.getClass, classOf[T], Tclass, classOf[C2], O2.getClass) } + + def g1 = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + @inline final def gi = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + val gv: () => Boolean = () => checkLocks()(this, this.getClass, gv, gv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + def gf = { + lazy val gfv: AnyRef => Boolean = lock => checkLocks()(lock, lock.getClass, gfv, gfv.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + gfv(this) + } + def gl = { + lazy val glv = checkLocks()(this, this.getClass, T.this, T.this.getClass, classOf[T], Tclass, classOf[C2], O2, O2.getClass) + glv + } + } +} + +class C2 extends T +object O2 extends T + +object Test extends App { + def check(name: String, result: Boolean) { + println("%-10s %s" format (name +":", if (result) "OK" else "FAILED")) + } + + val c1 = new C1 + check("c1.f1", c1.f1) + check("c1.fi", c1.fi) + check("c1.fv", c1.fv()) + check("c1.ff", c1.ff) + check("c1.fl", c1.fl) + check("c1.fo", c1.fo) + check("c1.g1", c1.g1) + check("c1.gi", c1.gi) + check("c1.gv", c1.gv()) + check("c1.gf", c1.gf) +// check("c1.gl", c1.gl) // FIXME *.gl are failing because of the issue described in SUGGEST-11 + + check("c1.c.f1", c1.c.f1) + check("c1.c.fi", c1.c.fi) + check("c1.c.fv", c1.c.fv()) + check("c1.c.ff", c1.c.ff) + check("c1.c.fl", c1.c.fl) + check("c1.c.fo", c1.c.fo) + check("c1.c.fn", c1.c.fn) + check("c1.c.g1", c1.c.g1) + check("c1.c.gi", c1.c.gi) + check("c1.c.gv", c1.c.gv()) + check("c1.c.gf", c1.c.gf) +// check("c1.c.gl", c1.c.gl) + + check("c1.O.f1", c1.O.f1) + check("c1.O.fi", c1.O.fi) + check("c1.O.fv", c1.O.fv()) + check("c1.O.ff", c1.O.ff) + check("c1.O.fl", c1.O.fl) + check("c1.O.fo", c1.O.fo) + check("c1.O.fn", c1.O.fn) + check("c1.O.g1", c1.O.g1) + check("c1.O.gi", c1.O.gi) + check("c1.O.gv", c1.O.gv()) + check("c1.O.gf", c1.O.gf) +// check("c1.O.gl", c1.O.gl) + + check("O1.f1", O1.f1) + check("O1.fi", O1.fi) + check("O1.fv", O1.fv()) + check("O1.ff", O1.ff) + check("O1.fl", O1.fl) + check("O1.fo", O1.fo) + check("O1.g1", O1.g1) + check("O1.gi", O1.gi) + check("O1.gv", O1.gv()) + check("O1.gf", O1.gf) +// 
check("O1.gl", O1.gl) + + check("O1.c.f1", O1.c.f1) + check("O1.c.fi", O1.c.fi) + check("O1.c.fv", O1.c.fv()) + check("O1.c.ff", O1.c.ff) + check("O1.c.fl", O1.c.fl) + check("O1.c.fo", O1.c.fo) + check("O1.c.fn", O1.c.fn) + check("O1.c.g1", O1.c.g1) + check("O1.c.gi", O1.c.gi) + check("O1.c.gv", O1.c.gv()) + check("O1.c.gf", O1.c.gf) +// check("O1.c.gl", O1.c.gl) + + check("O1.O.f1", O1.O.f1) + check("O1.O.fi", O1.O.fi) + check("O1.O.fv", O1.O.fv()) + check("O1.O.ff", O1.O.ff) + check("O1.O.fl", O1.O.fl) + check("O1.O.fo", O1.O.fo) + check("O1.O.fn", O1.O.fn) + check("O1.O.g1", O1.O.g1) + check("O1.O.gi", O1.O.gi) + check("O1.O.gv", O1.O.gv()) + check("O1.O.gf", O1.O.gf) +// check("O1.O.gl", O1.O.gl) + + val c2 = new C2 + check("c2.f1", c2.f1) + check("c2.fi", c2.fi) + check("c2.fv", c2.fv()) + check("c2.ff", c2.ff) + check("c2.fl", c2.fl) + check("c2.fo", c2.fo) + check("c2.g1", c2.g1) + check("c2.gi", c2.gi) + check("c2.gv", c2.gv()) + check("c2.gf", c2.gf) +// check("c2.gl", c2.gl) + + check("c2.c.f1", c2.c.f1) + check("c2.c.fi", c2.c.fi) + check("c2.c.fv", c2.c.fv()) + check("c2.c.ff", c2.c.ff) + check("c2.c.fl", c2.c.fl) + check("c2.c.fo", c2.c.fo) + check("c2.c.fn", c2.c.fn) + check("c2.c.g1", c2.c.g1) + check("c2.c.gi", c2.c.gi) + check("c2.c.gv", c2.c.gv()) + check("c2.c.gf", c2.c.gf) +// check("c2.c.gl", c2.c.gl) + + check("c2.O.f1", c2.O.f1) + check("c2.O.fi", c2.O.fi) + check("c2.O.fv", c2.O.fv()) + check("c2.O.ff", c2.O.ff) + check("c2.O.fl", c2.O.fl) + check("c2.O.fo", c2.O.fo) + check("c2.O.fn", c2.O.fn) + check("c2.O.g1", c2.O.g1) + check("c2.O.gi", c2.O.gi) + check("c2.O.gv", c2.O.gv()) + check("c2.O.gf", c2.O.gf) +// check("c2.O.gl", c2.O.gl) + + check("O2.f1", O2.f1) + check("O2.fi", O2.fi) + check("O2.fv", O2.fv()) + check("O2.ff", O2.ff) + check("O2.fl", O2.fl) + check("O2.fo", O2.fo) + check("O2.g1", O2.g1) + check("O2.gi", O2.gi) + check("O2.gv", O2.gv()) + check("O2.gf", O2.gf) +// check("O2.gl", O2.gl) + + check("O2.c.f1", O2.c.f1) + check("O2.c.fi", O2.c.fi) + check("O2.c.fv", O2.c.fv()) + check("O2.c.ff", O2.c.ff) + check("O2.c.fl", O2.c.fl) + check("O2.c.fo", O2.c.fo) + check("O2.c.fn", O2.c.fn) + check("O2.c.g1", O2.c.g1) + check("O2.c.gi", O2.c.gi) + check("O2.c.gv", O2.c.gv()) + check("O2.c.gf", O2.c.gf) +// check("O2.c.gl", O2.c.gl) + + check("O2.O.f1", O2.O.f1) + check("O2.O.fi", O2.O.fi) + check("O2.O.fv", O2.O.fv()) + check("O2.O.ff", O2.O.ff) + check("O2.O.fl", O2.O.fl) + check("O2.O.fo", O2.O.fo) + check("O2.O.fn", O2.O.fn) + check("O2.O.g1", O2.O.g1) + check("O2.O.gi", O2.O.gi) + check("O2.O.gv", O2.O.gv()) + check("O2.O.gf", O2.O.gf) +// check("O2.O.gl", O2.O.gl) +} \ No newline at end of file -- cgit v1.2.3 From 18559c4dc530b7d930295cfd9dd0704a2b370b4e Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Tue, 14 Feb 2012 16:44:42 -0200 Subject: Close file descriptor leak in sys.process. This closes most file descriptor leaks in sys.process through the simple expedient of making sure every InputStream being read by BasicIO is closed once there's nothing more to read. A single file descriptor leak would remain for the OutputStream (that is, that process stdin) of each Process, which is closed after the InputStream being read to feed it is closed. Special care is taken not to close the calling process stdin. Fix an additional non-reported by where sending data to a process that had already terminated would result in an exception being thrown. File descriptors can still leak in some conditions that must be handled by user code. 
Documentation to that effect will follow. Closes SI-5439. --- src/library/scala/sys/process/BasicIO.scala | 18 ++++++++++++------ 1 file changed, 12 insertions(+), 6 deletions(-) diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 44e573896f..010a20b1dc 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -13,6 +13,7 @@ import processInternal._ import java.io.{ BufferedReader, InputStreamReader, FilterInputStream, FilterOutputStream } import java.util.concurrent.LinkedBlockingQueue import scala.collection.immutable.Stream +import scala.annotation.tailrec /** * This object contains factories for [[scala.sys.process.ProcessIO]], @@ -74,6 +75,7 @@ object BasicIO { def processFully(processLine: String => Unit): InputStream => Unit = in => { val reader = new BufferedReader(new InputStreamReader(in)) processLinesFully(processLine)(reader.readLine) + reader.close() } def processLinesFully(processLine: String => Unit)(readLine: () => String) { @@ -86,8 +88,11 @@ object BasicIO { } readFully() } - def connectToIn(o: OutputStream): Unit = transferFully(stdin, o) - def input(connect: Boolean): OutputStream => Unit = if (connect) connectToIn else _ => () + def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o) + def input(connect: Boolean): OutputStream => Unit = { outputToProcess => + if (connect) connectToIn(outputToProcess) + outputToProcess.close() + } def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput)) def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr) @@ -105,13 +110,14 @@ object BasicIO { private[this] def transferFullyImpl(in: InputStream, out: OutputStream) { val buffer = new Array[Byte](BufferSize) - def loop() { + @tailrec def loop() { val byteCount = in.read(buffer) if (byteCount > 0) { out.write(buffer, 0, byteCount) - out.flush() - loop() - } + // flush() will throw an exception once the process has terminated + val available = try { out.flush(); true } catch { case _: IOException => false } + if (available) loop() else in.close() + } else in.close() } loop() } -- cgit v1.2.3 From 838c97bcb55908aff3638caae0aa87d237100b4b Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Wed, 15 Feb 2012 20:02:23 -0200 Subject: Major rewrite of sys.process documentation. Document the stream closing requisite on ProcessIO, document a few stream closing semantics on BasicIO, and then take advantage of my newly-found knowledge and go on a rampage all over sys.process. Also make two methods that were implemented but not present in the public API visible. 
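The stream-closing discipline that the commit message above asks of user code can be sketched briefly. The block below is not part of either patch: the `FeedProcess` name, the `wc -c` command and the UTF-8 encoding are illustrative assumptions, while `ProcessIO`, `BasicIO.toStdOut`, `BasicIO.toStdErr`, `run` and `exitValue` are the library pieces these commits touch and document.

import scala.sys.process._
import java.io.OutputStream

// Illustrative sketch: feed a fixed string to an external command, closing
// the child's stdin once written, and let BasicIO's helpers (which now close
// their streams on end-of-input) handle stdout and stderr.
object FeedProcess {
  def feed(command: String, data: String): Int = {
    def writeInput(procIn: OutputStream) {
      procIn.write(data.getBytes("UTF-8"))
      procIn.close()                 // closes the child's stdin, not our own stdin
    }
    val io = new ProcessIO(writeInput, BasicIO.toStdOut, BasicIO.toStdErr)
    command.run(io).exitValue()      // start the process and wait for its exit code
  }
}

// e.g. FeedProcess.feed("wc -c", "hello") should print 5 on a Unix-like system

The `OutputStream` handed to `writeInput` is the child process' stdin, so closing it is both safe and required; the calling process' own stdin is never touched, which is the "special care" mentioned in the commit message.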
--- src/library/scala/sys/process/BasicIO.scala | 111 ++++++++ src/library/scala/sys/process/Process.scala | 67 +++-- src/library/scala/sys/process/ProcessBuilder.scala | 306 +++++++++++++++------ src/library/scala/sys/process/ProcessIO.scala | 49 +++- src/library/scala/sys/process/ProcessLogger.scala | 26 +- src/library/scala/sys/process/package.scala | 212 +++++++++++--- 6 files changed, 613 insertions(+), 158 deletions(-) diff --git a/src/library/scala/sys/process/BasicIO.scala b/src/library/scala/sys/process/BasicIO.scala index 010a20b1dc..5b7244e98e 100644 --- a/src/library/scala/sys/process/BasicIO.scala +++ b/src/library/scala/sys/process/BasicIO.scala @@ -20,9 +20,18 @@ import scala.annotation.tailrec * which can be used to control the I/O of a [[scala.sys.process.Process]] * when a [[scala.sys.process.ProcessBuilder]] is started with the `run` * command. + * + * It also contains some helper methods that can be used to in the creation of + * `ProcessIO`. + * + * It is used by other classes in the package in the implementation of various + * features, but can also be used by client code. */ object BasicIO { + /** Size of the buffer used in all the functions that copy data */ final val BufferSize = 8192 + + /** Used to separate lines in the `processFully` function that takes `Appendable`. */ final val Newline = props("line.separator") private[process] final class Streamed[T]( @@ -53,15 +62,70 @@ object BasicIO { def protect(out: OutputStream): OutputStream = if ((out eq stdout) || (out eq stderr)) Uncloseable(out) else out } + /** Creates a `ProcessIO` from a function `String => Unit`. It can attach the + * process input to stdin, and it will either send the error stream to + * stderr, or to a `ProcessLogger`. + * + * For example, the `ProcessIO` created below will print all normal output + * while ignoring all error output. No input will be provided. + * {{{ + * import scala.sys.process.BasicIO + * val errToDevNull = BasicIO(false, println(_), None) + * }}} + * + * @param withIn True if the process input should be attached to stdin. + * @param output A function that will be called with the process output. + * @param log An optional `ProcessLogger` to which the output should be + * sent. If `None`, output will be sent to stderr. + * @return A `ProcessIO` with the characteristics above. + */ def apply(withIn: Boolean, output: String => Unit, log: Option[ProcessLogger]) = new ProcessIO(input(withIn), processFully(output), getErr(log)) + /** Creates a `ProcessIO` that appends its output to a `StringBuffer`. It can + * attach the process input to stdin, and it will either send the error + * stream to stderr, or to a `ProcessLogger`. + * + * For example, the `ProcessIO` created by the function below will store the + * normal output on the buffer provided, and print all error on stderr. The + * input will be read from stdin. + * {{{ + * import scala.sys.process.{BasicIO, ProcessLogger} + * val printer = ProcessLogger(println(_)) + * def appendToBuffer(b: StringBuffer) = BasicIO(true, b, Some(printer)) + * }}} + * + * @param withIn True if the process input should be attached to stdin. + * @param buffer A `StringBuffer` which will receive the process normal + * output. + * @param log An optional `ProcessLogger` to which the output should be + * sent. If `None`, output will be sent to stderr. + * @return A `ProcessIO` with the characteristics above. 
+ */ def apply(withIn: Boolean, buffer: StringBuffer, log: Option[ProcessLogger]) = new ProcessIO(input(withIn), processFully(buffer), getErr(log)) + /** Creates a `ProcessIO` from a `ProcessLogger` . It can attach the + * process input to stdin. + * + * @param withIn True if the process input should be attached to stdin. + * @param log A `ProcessLogger` to receive all output, normal and error. + * @return A `ProcessIO` with the characteristics above. + */ def apply(withIn: Boolean, log: ProcessLogger) = new ProcessIO(input(withIn), processOutFully(log), processErrFully(log)) + /** Returns a function `InputStream => Unit` given an optional + * `ProcessLogger`. If no logger is passed, the function will send the output + * to stderr. This function can be used to create a + * [[scala.sys.process.ProcessIO]]. + * + * @param log An optional `ProcessLogger` to which the contents of + * the `InputStream` will be sent. + * @return A function `InputStream => Unit` (used by + * [[scala.sys.process.ProcessIO]]) which will send the data to + * either the provided `ProcessLogger` or, if `None`, to stderr. + */ def getErr(log: Option[ProcessLogger]) = log match { case Some(lg) => processErrFully(lg) case None => toStdErr @@ -70,14 +134,40 @@ object BasicIO { private def processErrFully(log: ProcessLogger) = processFully(log err _) private def processOutFully(log: ProcessLogger) = processFully(log out _) + /** Closes a `Closeable` without throwing an exception */ def close(c: Closeable) = try c.close() catch { case _: IOException => () } + + /** Returns a function `InputStream => Unit` that appends all data read to the + * provided `Appendable`. This function can be used to create a + * [[scala.sys.process.ProcessIO]]. The buffer will be appended line by line. + * + * @param buffer An `Appendable` such as `StringBuilder` or `StringBuffer`. + * @return A function `InputStream => Unit` (used by + * [[scala.sys.process.ProcessIO]] which will append all data read + * from the stream to the buffer. + */ def processFully(buffer: Appendable): InputStream => Unit = processFully(appendLine(buffer)) + + /** Returns a function `InputStream => Unit` that will call the passed + * function with all data read. This function can be used to create a + * [[scala.sys.process.ProcessIO]]. The `processLine` function will be called + * with each line read, and `Newline` will be appended after each line. + * + * @param processLine A function that will be called with all data read from + * the stream. + * @return A function `InputStream => Unit` (used by + * [[scala.sys.process.ProcessIO]] which will call `processLine` + * with all data read from the stream. + */ def processFully(processLine: String => Unit): InputStream => Unit = in => { val reader = new BufferedReader(new InputStreamReader(in)) processLinesFully(processLine)(reader.readLine) reader.close() } + /** Calls `processLine` with the result of `readLine` until the latter returns + * `null`. + */ def processLinesFully(processLine: String => Unit)(readLine: () => String) { def readFully() { val line = readLine() @@ -88,17 +178,38 @@ object BasicIO { } readFully() } + + /** Copy contents of stdin to the `OutputStream`. */ def connectToIn(o: OutputStream): Unit = transferFully(Uncloseable protect stdin, o) + + /** Returns a function `OutputStream => Unit` that either reads the content + * from stdin or does nothing. This function can be used by + * [[scala.sys.process.ProcessIO]]. 
+ */ def input(connect: Boolean): OutputStream => Unit = { outputToProcess => if (connect) connectToIn(outputToProcess) outputToProcess.close() } + + /** Returns a `ProcessIO` connected to stdout and stderr, and, optionally, stdin. */ def standard(connectInput: Boolean): ProcessIO = standard(input(connectInput)) + + /** Retruns a `ProcessIO` connected to stdout, stderr and the provided `in` */ def standard(in: OutputStream => Unit): ProcessIO = new ProcessIO(in, toStdOut, toStdErr) + /** Send all the input from the stream to stderr, and closes the input stream + * afterwards. + */ def toStdErr = (in: InputStream) => transferFully(in, stderr) + + /** Send all the input from the stream to stdout, and closes the input stream + * afterwards. + */ def toStdOut = (in: InputStream) => transferFully(in, stdout) + /** Copy all input from the input stream to the output stream. Closes the + * input stream once it's all read. + */ def transferFully(in: InputStream, out: OutputStream): Unit = try transferFullyImpl(in, out) catch onInterrupt(()) diff --git a/src/library/scala/sys/process/Process.scala b/src/library/scala/sys/process/Process.scala index b8765aa615..c2a61af936 100644 --- a/src/library/scala/sys/process/Process.scala +++ b/src/library/scala/sys/process/Process.scala @@ -13,7 +13,7 @@ import processInternal._ import ProcessBuilder._ /** Represents a process that is running or has finished running. - * It may be a compound process with several underlying native processes (such as 'a #&& b`). + * It may be a compound process with several underlying native processes (such as `a #&& b`). * * This trait is often not used directly, though its companion object contains * factories for [[scala.sys.process.ProcessBuilder]], the main component of this @@ -42,28 +42,28 @@ object Process extends ProcessImpl with ProcessCreation { } * found on and used through [[scala.sys.process.Process]]'s companion object. */ trait ProcessCreation { - /** Create a [[scala.sys.process.ProcessBuilder]] from a `String`, including the + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String`, including the * parameters. * * @example {{{ apply("cat file.txt") }}} */ def apply(command: String): ProcessBuilder = apply(command, None) - /** Create a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`, + /** Creates a [[scala.sys.process.ProcessBuilder]] from a sequence of `String`, * where the head is the command and each element of the tail is a parameter. * * @example {{{ apply("cat" :: files) }}} */ def apply(command: Seq[String]): ProcessBuilder = apply(command, None) - /** Create a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`, + /** Creates a [[scala.sys.process.ProcessBuilder]] from a command represented by a `String`, * and a sequence of `String` representing the arguments. * * @example {{{ apply("cat", files) }}} */ def apply(command: String, arguments: Seq[String]): ProcessBuilder = apply(command +: arguments, None) - /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra + /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra * environment variables. 
* * @example {{{ apply("java", new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}} @@ -71,7 +71,7 @@ trait ProcessCreation { def apply(command: String, cwd: File, extraEnv: (String, String)*): ProcessBuilder = apply(command, Some(cwd), extraEnv: _*) - /** Create a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra + /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir set to `File` and extra * environment variables. * * @example {{{ apply("java" :: javaArgs, new java.ioFile("/opt/app"), "CLASSPATH" -> "library.jar") }}} @@ -79,7 +79,7 @@ trait ProcessCreation { def apply(command: Seq[String], cwd: File, extraEnv: (String, String)*): ProcessBuilder = apply(command, Some(cwd), extraEnv: _*) - /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to + /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to * `File` and extra environment variables. * * @example {{{ apply("java", params.get("cwd"), "CLASSPATH" -> "library.jar") }}} @@ -93,7 +93,7 @@ trait ProcessCreation { }*/ } - /** Create a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to + /** Creates a [[scala.sys.process.ProcessBuilder]] with working dir optionally set to * `File` and extra environment variables. * * @example {{{ apply("java" :: javaArgs, params.get("cwd"), "CLASSPATH" -> "library.jar") }}} @@ -105,7 +105,7 @@ trait ProcessCreation { apply(jpb) } - /** create a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`. + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.lang.ProcessBuilder`. * * @example {{{ * apply((new java.lang.ProcessBuilder("ls", "-l")) directory new java.io.File(System.getProperty("user.home"))) @@ -113,19 +113,19 @@ trait ProcessCreation { */ def apply(builder: JProcessBuilder): ProcessBuilder = new Simple(builder) - /** create a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.io.File`. This * `ProcessBuilder` can then be used as a `Source` or a `Sink`, so one can * pipe things from and to it. */ def apply(file: File): FileBuilder = new FileImpl(file) - /** Create a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `java.net.URL`. This * `ProcessBuilder` can then be used as a `Source`, so that one can pipe things * from it. */ def apply(url: URL): URLBuilder = new URLImpl(url) - /** Create a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element. + /** Creates a [[scala.sys.process.ProcessBuilder]] from a Scala XML Element. * This can be used as a way to template strings. * * @example {{{ @@ -134,23 +134,23 @@ trait ProcessCreation { */ def apply(command: scala.xml.Elem): ProcessBuilder = apply(command.text.trim) - /** Create a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `Boolean`. This can be * to force an exit value. */ def apply(value: Boolean): ProcessBuilder = apply(value.toString, if (value) 0 else 1) - /** Create a [[scala.sys.process.ProcessBuilder]] from a `String` name and a + /** Creates a [[scala.sys.process.ProcessBuilder]] from a `String` name and a * `Boolean`. This can be used to force an exit value, with the name being * used for `toString`. 
*/ def apply(name: String, exitValue: => Int): ProcessBuilder = new Dummy(name, exitValue) - /** Create a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of + /** Creates a sequence of [[scala.sys.process.ProcessBuilder.Source]] from a sequence of * something else for which there's an implicit conversion to `Source`. */ def applySeq[T](builders: Seq[T])(implicit convert: T => Source): Seq[Source] = builders.map(convert) - /** Create a [[scala.sys.process.ProcessBuilder]] from one or more + /** Creates a [[scala.sys.process.ProcessBuilder]] from one or more * [[scala.sys.process.ProcessBuilder.Source]], which can then be * piped to something else. * @@ -170,7 +170,7 @@ trait ProcessCreation { */ def cat(file: Source, files: Source*): ProcessBuilder = cat(file +: files) - /** Create a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence + /** Creates a [[scala.sys.process.ProcessBuilder]] from a non-empty sequence * of [[scala.sys.process.ProcessBuilder.Source]], which can then be * piped to something else. * @@ -198,18 +198,41 @@ trait ProcessImplicits { /** Implicitly convert a `java.lang.ProcessBuilder` into a Scala one. */ implicit def builderToProcess(builder: JProcessBuilder): ProcessBuilder = apply(builder) - /** Implicitly convert a `java.io.File` into a [[scala.sys.process.ProcessBuilder]] */ + /** Implicitly convert a `java.io.File` into a + * [[scala.sys.process.ProcessBuilder.FileBuilder]], which can be used as + * either input or output of a process. For example: + * {{{ + * import scala.sys.process._ + * "ls" #> new java.io.File("dirContents.txt") ! + * }}} + */ implicit def fileToProcess(file: File): FileBuilder = apply(file) - /** Implicitly convert a `java.net.URL` into a [[scala.sys.process.ProcessBuilder]] */ + /** Implicitly convert a `java.net.URL` into a + * [[scala.sys.process.ProcessBuilder.URLBuilder]] , which can be used as + * input to a process. For example: + * {{{ + * import scala.sys.process._ + * Seq("xmllint", "--html", "-") #< new java.net.URL("http://www.scala-lang.org") #> new java.io.File("fixed.html") ! + * }}} + */ implicit def urlToProcess(url: URL): URLBuilder = apply(url) - /** Implicitly convert a [[scala.xml.Elem]] into a [[scala.sys.process.ProcessBuilder]] */ + /** Implicitly convert a [[scala.xml.Elem]] into a + * [[scala.sys.process.ProcessBuilder]]. This is done by obtaining the text + * elements of the element, trimming spaces, and then converting the result + * from string to a process. Importantly, tags are completely ignored, so + * they cannot be used to separate parameters. + */ implicit def xmlToProcess(command: scala.xml.Elem): ProcessBuilder = apply(command) - /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]] */ + /** Implicitly convert a `String` into a [[scala.sys.process.ProcessBuilder]]. */ implicit def stringToProcess(command: String): ProcessBuilder = apply(command) - /** Implicitly convert a sequence of `String` into a [[scala.sys.process.ProcessBuilder]] */ + /** Implicitly convert a sequence of `String` into a + * [[scala.sys.process.ProcessBuilder]]. The first argument will be taken to + * be the command to be executed, and the remaining will be its arguments. + * When using this, arguments may contain spaces. 
+ */ implicit def stringSeqToProcess(command: Seq[String]): ProcessBuilder = apply(command) } diff --git a/src/library/scala/sys/process/ProcessBuilder.scala b/src/library/scala/sys/process/ProcessBuilder.scala index 214d908012..20270d423f 100644 --- a/src/library/scala/sys/process/ProcessBuilder.scala +++ b/src/library/scala/sys/process/ProcessBuilder.scala @@ -12,133 +12,265 @@ package process import processInternal._ import ProcessBuilder._ -/** Represents a runnable process. +/** Represents a sequence of one or more external processes that can be + * executed. A `ProcessBuilder` can be a single external process, or a + * combination of other `ProcessBuilder`. One can control where a + * the output of an external process will go to, and where its input will come + * from, or leave that decision to whoever starts it. * - * This is the main component of this package. A `ProcessBuilder` may be composed with - * others, either concatenating their outputs or piping them from one to the next, and - * possibly with conditional execution depending on the last process exit value. + * One creates a `ProcessBuilder` through factories provided in + * [[scala.sys.process.Process]]'s companion object, or implicit conversions + * based on these factories made available in the package object + * [[scala.sys.process]]. Here are some examples: + * {{{ + * import.scala.sys.process._ * - * Once executed, one can retrieve the output or redirect it to a - * [[scala.sys.process.ProcessLogger]], or one can get the exit value, discarding or - * redirecting the output. + * // Executes "ls" and sends output to stdout + * "ls".! * - * One creates a `ProcessBuilder` through factories provided in [[scala.sys.process.Process]]'s - * companion object, or implicit conversions based on these factories made available in the - * package object [[scala.sys.process]]. + * // Execute "ls" and assign a `Stream[String]` of its output to "contents". + * // Because [[scala.Predef]] already defines a `lines` method for `String`, + * // we use [[scala.sys.process.Process]]'s object companion to create it. + * val contents = Process("ls").lines * - * Let's examine in detail one example of usage: + * // Here we use a `Seq` to make the parameter whitespace-safe + * def contentsOf(dir: String): String = Seq("ls", dir).!! + * }}} + * + * The methods of `ProcessBuilder` are divided in three categories: the ones that + * combine two `ProcessBuilder` to create a third, the ones that redirect input + * or output of a `ProcessBuilder`, and the ones that execute + * the external processes associated with it. + * + * ==Combining `ProcessBuilder`== + * + * Two existing `ProcessBuilder` can be combined in the following ways: + * + * * They can be executed in parallel, with the output of the first being fed + * as input to the second, like Unix pipes. This is achieved with the `#|` + * method. + * * They can be executed in sequence, with the second starting as soon as + * the first ends. This is done by the `###` method. + * * The execution of the second one can be conditioned by the return code + * (exit status) of the first, either only when it's zero, or only when it's + * not zero. The methods `#&&` and `#||` accomplish these tasks. + * + * ==Redirecting Input/Output== + * + * Though control of input and output can be done when executing the process, + * there's a few methods that create a new `ProcessBuilder` with a + * pre-configured input or output. 
They are `#<`, `#>` and `#>>`, and may take + * as input either another `ProcessBuilder` (like the pipe described above), or + * something else such as a `java.io.File` or a `java.lang.InputStream`. + * For example: + * {{{ + * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") ! + * }}} + * + * ==Starting Processes== + * + * To execute all external commands associated with a `ProcessBuilder`, one + * may use one of four groups of methods. Each of these methods have various + * overloads and variations to enable further control over the I/O. These + * methods are: + * + * * `run`: the most general method, it returns a + * [[scala.sys.process.Process]] immediately, and the external command + * executes concurrently. + * * `!`: blocks until all external commands exit, and returns the exit code + * of the last one in the chain of execution. + * * `!!`: blocks until all external commands exit, and returns a `String` + * with the output generated. + * * `lines`: returns immediately like `run`, and the output being generared + * is provided through a `Stream[String]`. Getting the next element of that + * `Stream` may block until it becomes available. This method will throw an + * exception if the return code is different than zero -- if this is not + * desired, use the `lines_!` method. + * + * ==Handling Input and Output== + * + * If not specified, the input of the external commands executed with `run` or + * `!` will not be tied to anything, and the output will be redirected to the + * stdout and stderr of the Scala process. For the methods `!!` and `lines`, no + * input will be provided, and the output will be directed according to the + * semantics of these methods. * + * Some methods will cause stdin to be used as input. Output can be controlled + * with a [[scala.sys.process.ProcessLogger]] -- `!!` and `lines` will only + * redirect error output when passed a `ProcessLogger`. If one desires full + * control over input and output, then a [[scala.sys.process.ProcessIO]] can be + * used with `run`. + * + * For example, we could silence the error output from `lines_!` like this: + * {{{ + * val etcFiles = "find /etc" lines_! ProcessLogger(line => ()) + * }}} + * + * ==Extended Example== + * + * Let's examine in detail one example of usage: * {{{ * import scala.sys.process._ * "find src -name *.scala -exec grep null {} ;" #| "xargs test -z" #&& "echo null-free" #|| "echo null detected" ! * }}} - * * Note that every `String` is implicitly converted into a `ProcessBuilder` * through the implicits imported from [[scala.sys.process]]. These `ProcessBuilder` are then * combined in three different ways. * * 1. `#|` pipes the output of the first command into the input of the second command. It - * mirrors a shell pipe (`|`). - * 2. `#&&` conditionally executes the second command if the previous one finished with - * exit value 0. It mirrors shell's `&&`. - * 3. `#||` conditionally executes the third command if the exit value of the previous - * command is different than zero. It mirrors shell's `&&`. - * - * Not shown here, the equivalent of a shell's `;` would be `###`. The reason for this name is - * that `;` is a reserved token in Scala. - * - * Finally, `!` at the end executes the commands, and returns the exit value. If the output - * was desired instead, one could run that with `!!` instead. 
- * - * If one wishes to execute the commands in background, one can either call `run`, which - * returns a [[scala.sys.process.Process]] from which the exit value can be obtained, or - * `lines`, which returns a [scala.collection.immutable.Stream] of output lines. This throws - * an exception at the end of the `Stream` is the exit value is non-zero. To avoid exceptions, - * one can use `lines_!` instead. - * - * One can also start the commands in specific ways to further control their I/O. Using `!<` to - * start the commands will use the stdin from the current process for them. All methods can - * be used passing a [[scala.sys.process.ProcessLogger]] to capture the output, both stderr and - * stdout. And, when using `run`, one can pass a [[scala.sys.process.ProcessIO]] to control - * stdin, stdout and stderr. - * - * The stdin of a command can be redirected from a `java.io.InputStream`, a `java.io.File`, a - * `java.net.URL` or another `ProcessBuilder` through the method `#<`. Likewise, the stdout - * can be sent to a `java.io.OutputStream`, a `java.io.File` or another `ProcessBuilder` with - * the method `#>`. The method `#>>` can be used to append the output to a `java.io.File`. - * For example: + * mirrors a shell pipe (`|`). + * 1. `#&&` conditionally executes the second command if the previous one finished with + * exit value 0. It mirrors shell's `&&`. + * 1. `#||` conditionally executes the third command if the exit value of the previous + * command is different than zero. It mirrors shell's `&&`. * - * {{{ - * new URL("http://databinder.net/dispatch/About") #> "grep JSON" #>> new File("About_JSON") ! - * }}} + * Finally, `!` at the end executes the commands, and returns the exit value. + * Whatever is printed will be sent to the Scala process standard output. If + * we wanted to caputre it, we could run that with `!!` instead. + * + * Note: though it is not shown above, the equivalent of a shell's `;` would be + * `###`. The reason for this name is that `;` is a reserved token in Scala. */ trait ProcessBuilder extends Source with Sink { - /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is - * sent to the console. If the exit code is non-zero, an exception is thrown.*/ + /** Starts the process represented by this builder, blocks until it exits, and + * returns the output as a String. Standard error is sent to the console. If + * the exit code is non-zero, an exception is thrown. + */ def !! : String - /** Starts the process represented by this builder, blocks until it exits, and returns the output as a String. Standard error is - * sent to the provided ProcessLogger. If the exit code is non-zero, an exception is thrown.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the output as a String. Standard error is sent to the provided + * ProcessLogger. If the exit code is non-zero, an exception is thrown. + */ def !!(log: ProcessLogger): String - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination and then throw an exception. */ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the output as a String. Standard error is sent to the console. 
If + * the exit code is non-zero, an exception is thrown. The newly started + * process reads from standard input of the current process. + */ + def !!< : String + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the output as a String. Standard error is sent to the provided + * ProcessLogger. If the exit code is non-zero, an exception is thrown. The + * newly started process reads from standard input of the current process. + */ + def !!<(log: ProcessLogger): String + + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. Standard error is sent to the console. If the process exits + * with a non-zero value, the Stream will provide all lines up to termination + * and then throw an exception. + */ def lines: Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ + + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the Stream will provide all lines up + * to termination but will not throw an exception. + */ def lines(log: ProcessLogger): Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the console. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ + + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. Standard error is sent to the console. If the process exits + * with a non-zero value, the Stream will provide all lines up to termination + * but will not throw an exception. + */ def lines_! : Stream[String] - /** Starts the process represented by this builder. The output is returned as a Stream that blocks when lines are not available - * but the process has not completed. Standard error is sent to the provided ProcessLogger. If the process exits with a non-zero value, - * the Stream will provide all lines up to termination but will not throw an exception. */ + + /** Starts the process represented by this builder. The output is returned as + * a Stream that blocks when lines are not available but the process has not + * completed. Standard error is sent to the provided ProcessLogger. If the + * process exits with a non-zero value, the Stream will provide all lines up + * to termination but will not throw an exception. + */ def lines_!(log: ProcessLogger): Stream[String] - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the console.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the exit code. Standard output and error are sent to the console. + */ def ! 
: Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the exit code. Standard output and error are sent to the given + * ProcessLogger. + */ def !(log: ProcessLogger): Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the console. The newly started process reads from standard input of the current process.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the exit code. Standard output and error are sent to the console. + * The newly started process reads from standard input of the current process. + */ def !< : Int - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger. The newly started process reads from standard input of the current process.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the exit code. Standard output and error are sent to the given + * ProcessLogger. The newly started process reads from standard input of the + * current process. + */ def !<(log: ProcessLogger): Int - /** Starts the process represented by this builder. Standard output and error are sent to the console.*/ + + /** Starts the process represented by this builder. Standard output and error + * are sent to the console.*/ def run(): Process - /** Starts the process represented by this builder. Standard output and error are sent to the given ProcessLogger.*/ + + /** Starts the process represented by this builder. Standard output and error + * are sent to the given ProcessLogger. + */ def run(log: ProcessLogger): Process - /** Starts the process represented by this builder. I/O is handled by the given ProcessIO instance.*/ + + /** Starts the process represented by this builder. I/O is handled by the + * given ProcessIO instance. + */ def run(io: ProcessIO): Process - /** Starts the process represented by this builder. Standard output and error are sent to the console. - * The newly started process reads from standard input of the current process if `connectInput` is true.*/ + + /** Starts the process represented by this builder. Standard output and error + * are sent to the console. The newly started process reads from standard + * input of the current process if `connectInput` is true. + */ def run(connectInput: Boolean): Process - /** Starts the process represented by this builder, blocks until it exits, and returns the exit code. Standard output and error are - * sent to the given ProcessLogger. - * The newly started process reads from standard input of the current process if `connectInput` is true.*/ + + /** Starts the process represented by this builder, blocks until it exits, and + * returns the exit code. Standard output and error are sent to the given + * ProcessLogger. The newly started process reads from standard input of the + * current process if `connectInput` is true. + */ def run(log: ProcessLogger, connectInput: Boolean): Process - /** Constructs a command that runs this command first and then `other` if this command succeeds.*/ + /** Constructs a command that runs this command first and then `other` if this + * command succeeds. 
+ */ def #&& (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that runs this command first and then `other` if this command does not succeed.*/ + + /** Constructs a command that runs this command first and then `other` if this + * command does not succeed. + */ def #|| (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that will run this command and pipes the output to `other`. `other` must be a simple command.*/ + + /** Constructs a command that will run this command and pipes the output to + * `other`. `other` must be a simple command. + */ def #| (other: ProcessBuilder): ProcessBuilder - /** Constructs a command that will run this command and then `other`. The exit code will be the exit code of `other`.*/ + + /** Constructs a command that will run this command and then `other`. The + * exit code will be the exit code of `other`. + */ def ### (other: ProcessBuilder): ProcessBuilder - /** True if this command can be the target of a pipe. - */ + + /** True if this command can be the target of a pipe. */ def canPipeTo: Boolean - /** True if this command has an exit code which should be propagated to the user. - * Given a pipe between A and B, if B.hasExitValue is true then the exit code will - * be the one from B; if it is false, the one from A. This exists to prevent output - * redirections (implemented as pipes) from masking useful process error codes. - */ + /** True if this command has an exit code which should be propagated to the + * user. Given a pipe between A and B, if B.hasExitValue is true then the + * exit code will be the one from B; if it is false, the one from A. This + * exists to prevent output redirections (implemented as pipes) from masking + * useful process error codes. + */ def hasExitValue: Boolean } diff --git a/src/library/scala/sys/process/ProcessIO.scala b/src/library/scala/sys/process/ProcessIO.scala index 261e837a4d..fa0674670f 100644 --- a/src/library/scala/sys/process/ProcessIO.scala +++ b/src/library/scala/sys/process/ProcessIO.scala @@ -11,14 +11,40 @@ package process import processInternal._ -/** This class is used to control the I/O of every [[scala.sys.process.ProcessBuilder]]. - * Most of the time, there is no need to interact with `ProcessIO` directly. However, if - * fine control over the I/O of a `ProcessBuilder` is desired, one can use the factories - * on [[scala.sys.process.BasicIO]] stand-alone object to create one. - * - * Each method will be called in a separate thread. - * If daemonizeThreads is true, they will all be marked daemon threads. - */ +/** This class is used to control the I/O of every + * [[scala.sys.process.Process]]. The functions used to create it will be + * called with the process streams once it has been started. It might not be + * necessary to use `ProcessIO` directly -- + * [[scala.sys.process.ProcessBuilder]] can return the process output to the + * caller, or use a [[scala.sys.process.ProcessLogger]] which avoids direct + * interaction with a stream. One can even use the factories at `BasicIO` to + * create a `ProcessIO`, or use its helper methods when creating one's own + * `ProcessIO`. + * + * When creating a `ProcessIO`, it is important to ''close all streams'' when + * finished, since the JVM might use system resources to capture the process + * input and output, and will not release them unless the streams are + * explicitly closed. 
+ * + * `ProcessBuilder` will call `writeInput`, `processOutput` and `processError` + * in separate threads, and if daemonizeThreads is true, they will all be + * marked as daemon threads. + * + * @param writeInput Function that will be called with the `OutputStream` to + * which all input to the process must be written. This will + * be called in a newly spawned thread. + * @param processOutput Function that will be called with the `InputStream` + * from which all normal output of the process must be + * read from. This will be called in a newly spawned + * thread. + * @param processError Function that will be called with the `InputStream` from + * which all error output of the process must be read from. + * This will be called in a newly spawned thread. + * @param daemonizeThreads Indicates whether the newly spawned threads that + * will run `processOutput`, `processError` and + * `writeInput` should be marked as daemon threads. + * @note Failure to close the passed streams may result in resource leakage. + */ final class ProcessIO( val writeInput: OutputStream => Unit, val processOutput: InputStream => Unit, @@ -27,8 +53,15 @@ final class ProcessIO( ) { def this(in: OutputStream => Unit, out: InputStream => Unit, err: InputStream => Unit) = this(in, out, err, false) + /** Creates a new `ProcessIO` with a different handler for the process input. */ def withInput(write: OutputStream => Unit): ProcessIO = new ProcessIO(write, processOutput, processError, daemonizeThreads) + + /** Creates a new `ProcessIO` with a different handler for the normal output. */ def withOutput(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, process, processError, daemonizeThreads) + + /** Creates a new `ProcessIO` with a different handler for the error output. */ def withError(process: InputStream => Unit): ProcessIO = new ProcessIO(writeInput, processOutput, process, daemonizeThreads) + + /** Creates a new `ProcessIO`, with `daemonizeThreads` true. */ def daemonized(): ProcessIO = new ProcessIO(writeInput, processOutput, processError, true) } diff --git a/src/library/scala/sys/process/ProcessLogger.scala b/src/library/scala/sys/process/ProcessLogger.scala index 67146dd70e..a8241db53c 100644 --- a/src/library/scala/sys/process/ProcessLogger.scala +++ b/src/library/scala/sys/process/ProcessLogger.scala @@ -11,12 +11,26 @@ package process import java.io._ -/** Encapsulates the output and error streams of a running process. - * Many of the methods of `ProcessBuilder` accept a `ProcessLogger` as - * an argument. - * - * @see [[scala.sys.process.ProcessBuilder]] - */ +/** Encapsulates the output and error streams of a running process. This is used + * by [[scala.sys.process.ProcessBuilder]] when starting a process, as an + * alternative to [[scala.sys.process.ProcessIO]], which can be more difficult + * to use. Note that a `ProcessLogger` will be used to create a `ProcessIO` + * anyway. The object `BasicIO` has some functions to do that. + * + * Here is an example that counts the number of lines in the normal and error + * output of a process: + * {{{ + * import scala.sys.process._ + * + * var normalLines = 0 + * var errorLines = 0 + * val countLogger = ProcessLogger(line => normalLines += 1, + * line => errorLines += 1) + * "find /etc" ! countLogger + * }}} + * + * @see [[scala.sys.process.ProcessBuilder]] + */ trait ProcessLogger { /** Will be called with each line read from the process output stream. 
*/ diff --git a/src/library/scala/sys/process/package.scala b/src/library/scala/sys/process/package.scala index 3eb0e5bb89..c1bf470831 100644 --- a/src/library/scala/sys/process/package.scala +++ b/src/library/scala/sys/process/package.scala @@ -11,40 +11,175 @@ // for process debugging output. // package scala.sys { - /** - * This package is used to create process pipelines, similar to Unix command pipelines. + /** This package handles the execution of external processes. The contents of + * this package can be divided in three groups, according to their + * responsibilities: * - * The key concept is that one builds a [[scala.sys.process.Process]] that will run and return an exit - * value. This `Process` is usually composed of one or more [[scala.sys.process.ProcessBuilder]], fed by a - * [[scala.sys.process.ProcessBuilder.Source]] and feeding a [[scala.sys.process.ProcessBuilder.Sink]]. A - * `ProcessBuilder` itself is both a `Source` and a `Sink`. + * - Indicating what to run and how to run it. + * - Handling a process input and output. + * - Running the process. * - * As `ProcessBuilder`, `Sink` and `Source` are abstract, one usually creates them with `apply` methods on - * the companion object of [[scala.sys.process.Process]], or through implicit conversions available in this - * package object from `String` and other types. The pipe is composed through unix-like pipeline and I/O - * redirection operators available on [[scala.sys.process.ProcessBuilder]]. + * For simple uses, the only group that matters is the first one. Running an + * external command can be as simple as `"ls".!`, or as complex as building a + * pipeline of commands such as this: * - * The example below shows how to build and combine such commands. It searches for `null` uses in the `src` - * directory, printing a message indicating whether they were found or not. The first command pipes its - * output to the second command, whose exit value is then used to choose between the third or fourth - * commands. This same example is explained in greater detail on [[scala.sys.process.ProcessBuilder]]. + * {{{ + * import scala.sys.process._ + * "ls" #| "grep .scala" #&& "scalac *.scala" #|| "echo nothing found" lines + * }}} + * + * We describe below the general concepts and architecture of the package, + * and then take a closer look at each of the categories mentioned above. + * + * ==Concepts and Architecture== + * + * The underlying basis for the whole package is Java's `Process` and + * `ProcessBuilder` classes. While there's no need to use these Java classes, + * they impose boundaries on what is possible. One cannot, for instance, + * retrieve a ''process id'' for whatever is executing. + * + * When executing an external process, one can provide a command's name, + * arguments to it, the directory in which it will be executed and what + * environment variables will be set. For each executing process, one can + * feed its standard input through a `java.io.OutputStream`, and read from + * its standard output and standard error through a pair of + * `java.io.InputStream`. One can wait until a process finishes execution and + * then retrieve its return value, or one can kill an executing process. + * Everything else must be built on those features. + * + * This package provides a DSL for running and chaining such processes, + * mimicking Unix shells ability to pipe output from one process to the input + * of another, or control the execution of further processes based on the + * return status of the previous one. 
+ * + * In addition to this DSL, this package also provides a few ways of + * controlling input and output of these processes, going from simple and + * easy to use to complex and flexible. * + * When processes are composed, a new `ProcessBuilder` is created which, when + * run, will execute the `ProcessBuilder` instances it is composed of + * according to the manner of the composition. If piping one process to + * another, they'll be executed simultaneously, and each will be passed a + * `ProcessIO` that will copy the output of one to the input of the other. + * + * ==What to Run and How== + * + * The central component of the process execution DSL is the + * [[scala.sys.process.ProcessBuilder]] trait. It is `ProcessBuilder` that + * implements the process execution DSL, that creates the + * [[scala.sys.process.Process]] that will handle the execution, and return + * the results of such execution to the caller. We can see that DSL in the + * introductory example: `#|`, `#&&` and `#!!` are methods on + * `ProcessBuilder` used to create a new `ProcessBuilder` through + * composition. + * + * One creates a `ProcessBuilder` either through factories on the + * [[scala.sys.process.Process]]'s companion object, or through implicit + * conversions available in this package object itself. Implicitly, each + * process is created either out of a `String`, with arguments separated by + * spaces -- no escaping of spaces is possible -- or out of a + * [[scala.collection.Seq]], where the first element represents the command + * name, and the remaining elements are arguments to it. In this latter case, + * arguments may contain spaces. One can also implicitly convert + * [[scala.xml.Elem]] and `java.lang.ProcessBuilder` into a `ProcessBuilder`. + * In the introductory example, the strings were converted into + * `ProcessBuilder` implicitly. + * + * To further control what how the process will be run, such as specifying + * the directory in which it will be run, see the factories on + * [[scala.sys.process.Process]]'s object companion. + * + * Once the desired `ProcessBuilder` is available, it can be executed in + * different ways, depending on how one desires to control its I/O, and what + * kind of result one wishes for: + * + * - Return status of the process (`!` methods) + * - Output of the process as a `String` (`!!` methods) + * - Continuous output of the process as a `Stream[String]` (`lines` methods) + * - The `Process` representing it (`run` methods) + * + * Some simple examples of these methods: * {{{ * import scala.sys.process._ - * ( - * "find src -name *.scala -exec grep null {} ;" - * #| "xargs test -z" - * #&& "echo null-free" #|| "echo null detected" - * ) ! + * + * // This uses ! to get the exit code + * def fileExists(name: String) = Seq("test", "-f", name).! == 0 + * + * // This uses !! to get the whole result as a string + * val dirContents = "ls".!! + * + * // This "fire-and-forgets" the method, which can be lazily read through + * // a Stream[String] + * def sourceFilesAt(baseDir: String): Stream[String] = { + * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") + * cmd.lines + * } * }}} * - * Other implicits available here are for [[scala.sys.process.ProcessBuilder.FileBuilder]], which extends - * both `Sink` and `Source`, and for [[scala.sys.process.ProcessBuilder.URLBuilder]], which extends - * `Source` alone. + * We'll see more details about controlling I/O of the process in the next + * section. 
+ * + * ==Handling Input and Output== + * + * In the underlying Java model, once a `Process` has been started, one can + * get `java.io.InputStream` and `java.io.OutpuStream` representing its + * output and input respectively. That is, what one writes to an + * `OutputStream` is turned into input to the process, and the output of a + * process can be read from an `InputStream` -- of which there are two, one + * representing normal output, and the other representing error output. + * + * This model creates a difficulty, which is that the code responsible for + * actually running the external processes is the one that has to take + * decisions about how to handle its I/O. + * + * This package presents an alternative model: the I/O of a running process + * is controlled by a [[scala.sys.process.ProcessIO]] object, which can be + * passed _to_ the code that runs the external process. A `ProcessIO` will + * have direct access to the java streams associated with the process I/O. It + * must, however, close these streams afterwards. + * + * Simpler abstractions are available, however. The components of this + * package that handle I/O are: + * + * - [[scala.sys.process.ProcessIO]]: provides the low level abstraction. + * - [[scala.sys.process.ProcessLogger]]: provides a higher level abstraction + * for output, and can be created through its object companion + * - [[scala.sys.process.BasicIO]]: a library of helper methods for the + * creation of `ProcessIO`. + * - This package object itself, with a few implicit conversions. * - * One can even create a `Process` solely out of these, without running any command. For example, this will - * download from a URL to a file: + * Some examples of I/O handling: + * {{{ + * import scala.sys.process._ + * + * // An overly complex way of computing size of a compressed file + * def gzFileSize(name: String) = { + * val cat = Seq("zcat", "name") + * var count = 0 + * def byteCounter(input: java.io.InputStream) = { + * while(input.read() != -1) count += 1 + * input.close() + * } + * cat ! new ProcessIO(_.close(), byteCounter, _.close()) + * count + * } + * + * // This "fire-and-forgets" the method, which can be lazily read through + * // a Stream[String], and accumulates all errors on a StringBuffer + * def sourceFilesAt(baseDir: String): (Stream[String], StringBuffer) = { + * val buffer = new StringBuffer() + * val cmd = Seq("find", baseDir, "-name", "*.scala", "-type", "f") + * val lines = cmd lines_! ProcessLogger(buffer append _) + * (lines, buffer) + * } + * }}} * + * Instances of the java classes `java.io.File` and `java.net.URL` can both + * be used directly as input to other processes, and `java.io.File` can be + * used as output as well. One can even pipe one to the other directly + * without any intervening process, though that's not a design goal or + * recommended usage. For example, the following code will copy a web page to + * a file: * {{{ * import java.io.File * import java.net.URL @@ -52,26 +187,33 @@ package scala.sys { * new URL("http://www.scala-lang.org/") #> new File("scala-lang.html") ! * }}} * - * One may use a `Process` directly through `ProcessBuilder`'s `run` method, which starts the process in - * the background, and returns a `Process`. If background execution is not desired, one can get a - * `ProcessBuilder` to execute through a method such as `!`, `lines`, `run` or variations thereof. That - * will create the `Process` to execute the commands, and return either the exit value or the output, maybe - * throwing an exception. 
- *
- * Finally, when executing a `ProcessBuilder`, one may pass a [[scala.sys.process.ProcessLogger]] to
- * capture stdout and stderr of the executing processes. A `ProcessLogger` may be created through its
- * companion object from functions of type `(String) => Unit`, or one might redirect it to a file, using
- * [[scala.sys.process.FileProcessLogger]], which can also be created through `ProcessLogger`'s object
- * companion.
+ * More information about the other ways of controlling I/O can be found
+ * in the scaladoc for the associated objects, traits and classes.
+ *
+ * ==Running the Process==
+ *
+ * Paradoxically, this is the simplest component of all, and the one least
+ * likely to be interacted with. It consists solely of
+ * [[scala.sys.process.Process]], and it provides only two methods:
+ *
+ * - `exitValue()`: blocks until the process exits, and then returns the exit
+ * value. This is what happens when one uses the `!` method of
+ * `ProcessBuilder`.
+ * - `destroy()`: this will kill the external process and close the streams
+ * associated with it.
 */
 package object process extends ProcessImplicits {
+  /** The arguments passed to `java` when creating this process */
   def javaVmArguments: List[String] = {
     import collection.JavaConversions._
     java.lang.management.ManagementFactory.getRuntimeMXBean().getInputArguments().toList
   }
+  /** The input stream of this process */
   def stdin = java.lang.System.in
+  /** The output stream of this process */
   def stdout = java.lang.System.out
+  /** The error stream of this process */
   def stderr = java.lang.System.err
 }

 // private val shell: String => Array[String] =
-- cgit v1.2.3


From 423360f597e20483307457686cee213e089cdd32 Mon Sep 17 00:00:00 2001
From: Erik Osheim
Date: Wed, 15 Feb 2012 18:21:38 -0500
Subject: Added test files to verify previous commit.

Tests scalac -optimize -Xprint:specialize -Ylog:inliner output to verify
that final/@inline + specialization are being handled correctly (that is,
the original class' specialized methods should not be final/@inline, but
its specialized subclass' should be).

This test was written by Vlad Ureche based on the bug report in SI-5005.
---
 test/files/specialized/SI-5005.check | 33 +++++++++++++++++++++++++++++++++
 test/files/specialized/SI-5005.scala | 23 +++++++++++++++++++++++
 2 files changed, 56 insertions(+)
 create mode 100644 test/files/specialized/SI-5005.check
 create mode 100644 test/files/specialized/SI-5005.scala

diff --git a/test/files/specialized/SI-5005.check b/test/files/specialized/SI-5005.check
new file mode 100644
index 0000000000..d2a97512ae
--- /dev/null
+++ b/test/files/specialized/SI-5005.check
@@ -0,0 +1,33 @@
+[[syntax trees at end of specialize]]// Scala source: newSource1
+package <empty> {
+  class C2[@specialized(scala.Boolean) U >: Nothing <: Any] extends Object with ScalaObject {
+    def <init>(): C2[U] = {
+      C2.super.<init>();
+      ()
+    };
+    def apply(x: U): U = x;
+    def apply$mcZ$sp(x: Boolean): Boolean = C2.this.apply(x.asInstanceOf[U]()).asInstanceOf[Boolean]()
+  };
+  class B extends Object with ScalaObject {
+    def <init>(): B = {
+      B.super.<init>();
+      ()
+    };
+    new C2$mcZ$sp().apply$mcZ$sp(true)
+  };
+  class C2$mcZ$sp extends C2[Boolean] {
+    def <init>(): C2$mcZ$sp = {
+      C2$mcZ$sp.super.<init>();
+      ()
+    };
+    @inline final override def apply(x: Boolean): Boolean = C2$mcZ$sp.this.apply$mcZ$sp(x);
+    @inline final override def apply$mcZ$sp(x: Boolean): Boolean = x
+  }
+}
+
+[log inliner] Analyzing C2.apply count 0 with 1 blocks
+[log inliner] C2.apply blocks before inlining: 1 (2) after: 1 (2)
+[log inliner] Analyzing C2.apply$mcZ$sp count 0 with 1 blocks
+[log inliner] C2.apply$mcZ$sp blocks before inlining: 1 (8) after: 1 (8)
+[log inliner] Not inlining into apply because it is marked @inline.
+[log inliner] Not inlining into apply$mcZ$sp because it is marked @inline.
diff --git a/test/files/specialized/SI-5005.scala b/test/files/specialized/SI-5005.scala
new file mode 100644
index 0000000000..cc9d327b08
--- /dev/null
+++ b/test/files/specialized/SI-5005.scala
@@ -0,0 +1,23 @@
+import scala.tools.partest._
+import java.io._
+
+object Test extends DirectTest {
+
+  override def extraSettings: String = "-usejavacp -Xprint:spec -optimize -Ylog:inliner -d " + testOutput.path
+
+  override def code = """
+    class C2[@specialized(Boolean) U]() {
+      @inline final def apply(x: U): U = x
+    }
+
+    class B {
+      (new C2[Boolean]())(true)
+    }
+  """
+
+  override def show(): Unit = {
+    // redirect err to out, for inliner log
+    System.setErr(new PrintStream(System.out));
+    compile()
+  }
+}
-- cgit v1.2.3