From 910a701fcc93e0663f0a6a15ac11499beb1ca6a9 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 12 Mar 2012 17:58:34 +0100 Subject: SI-5189: refined GADT soundness fix extrapolate GADT skolems: only complicate types when needed make sure we only deskolemize GADT skolems after typedCase --- test/files/neg/t3015.check | 2 +- test/files/neg/t3481.check | 6 +++--- test/files/neg/t4515.check | 2 +- test/files/neg/t5189b.check | 11 +++++++---- test/files/neg/t5189b.scala | 18 ++++++++++++++++++ 5 files changed, 30 insertions(+), 9 deletions(-) (limited to 'test/files') diff --git a/test/files/neg/t3015.check b/test/files/neg/t3015.check index 53221b7ca0..6948392bb0 100644 --- a/test/files/neg/t3015.check +++ b/test/files/neg/t3015.check @@ -1,5 +1,5 @@ t3015.scala:7: error: scrutinee is incompatible with pattern type; - found : _$1 where type +_$1 + found : _$1 required: String val b(foo) = "foo" ^ diff --git a/test/files/neg/t3481.check b/test/files/neg/t3481.check index 48e6ff357b..debe07275b 100644 --- a/test/files/neg/t3481.check +++ b/test/files/neg/t3481.check @@ -1,17 +1,17 @@ t3481.scala:5: error: type mismatch; found : String("hello") - required: _$1 where type +_$1 + required: _$1 f[A[Int]]("hello") ^ t3481.scala:11: error: type mismatch; - found : _$2 where type +_$2 + found : _$2 required: b.T (which expands to) _$2 def f[T <: B[_]](a: T#T, b: T) = b.m(a) ^ t3481.scala:12: error: type mismatch; found : String("Hello") - required: _$2 where type +_$2 + required: _$2 f("Hello", new B[Int]) ^ t3481.scala:18: error: type mismatch; diff --git a/test/files/neg/t4515.check b/test/files/neg/t4515.check index ce5350b35f..a60d16295f 100644 --- a/test/files/neg/t4515.check +++ b/test/files/neg/t4515.check @@ -1,6 +1,6 @@ t4515.scala:37: error: type mismatch; found : _0(in value $anonfun) where type _0(in value $anonfun) - required: (some other)_0(in value $anonfun) where type +(some other)_0(in value $anonfun) + required: (some other)_0(in value $anonfun) handler.onEvent(target, ctx.getEvent, node, ctx) ^ one error found diff --git a/test/files/neg/t5189b.check b/test/files/neg/t5189b.check index 7f78cbb438..46996e96d0 100644 --- a/test/files/neg/t5189b.check +++ b/test/files/neg/t5189b.check @@ -1,8 +1,11 @@ -t5189b.scala:25: error: type mismatch; - found : TestNeg.Wrapped[?T2] where type ?T2 <: T +t5189b.scala:38: error: type mismatch; + found : TestNeg.Wrapped[?T7] where type ?T7 <: T (this is a GADT skolem) required: TestNeg.Wrapped[T] -Note: ?T2 <: T, but class Wrapped is invariant in type W. +Note: ?T7 <: T, but class Wrapped is invariant in type W. You may wish to define W as +W instead. 
(SLS 4.5) case Wrapper/*[_ <: T ]*/(wrapped) => wrapped // : Wrapped[_ <: T], which is a subtype of Wrapped[T] if and only if Wrapped is covariant in its type parameter ^ -one error found +t5189b.scala:51: error: value foo is not a member of type parameter T + case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation) + ^ +two errors found diff --git a/test/files/neg/t5189b.scala b/test/files/neg/t5189b.scala index 1750f14084..7c1871dc97 100644 --- a/test/files/neg/t5189b.scala +++ b/test/files/neg/t5189b.scala @@ -5,8 +5,21 @@ class TestPos { def unwrap[T](x: AbsWrapperCov[T]): T = x match { case Wrapper/*[_ <: T ]*/(x) => x // _ <: T, which is a subtype of T } + + def unwrapOption[T](x: Option[T]): T = x match { + case Some(xs) => xs + } + + + case class Down[+T](x: T) + case class Up[-T](f: T => Unit) + + def f1[T](x1: Down[T])(x2: Up[T]) = ((x1, x2)) match { + case (Down(x), Up(f)) => f(x) + } } + object TestNeg extends App { class AbsWrapperCov[+A] case class Wrapper[B](x: Wrapped[B]) extends AbsWrapperCov[B] @@ -33,6 +46,11 @@ object TestNeg extends App { // val w = new Wrapped(new A) // unwrap[Any](Wrapper(w)).cell = new B // w.cell.imNotAB + + def unwrapOption[T](x: Option[T]): T = x match { + case Some(xs) => xs.foo // the error message should not refer to a skolem (testing extrapolation) + } + } // class TestPos1 { -- cgit v1.2.3 From c82ecabad6fc050411495f3fd50c3bf79ac7e96e Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Mar 2012 18:26:55 -0700 Subject: Finally did something about broken irrefutability. The parser has always been confused about tuple patterns in for comprehensions. It thinks it can fail to recognize an irrefutable pattern and have it removed in refchecks, but it is sadly mistaken, because the unnecessary filter has a tendency to fail the compile in typer. Look more intently for irrefutable patterns and don't insert the unnecessary filter. Closes SI-5589, SI-1336. --- src/compiler/scala/reflect/internal/TreeInfo.scala | 20 ++++++++++++- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 34 ++++++++++------------ test/files/neg/t5589neg.check | 30 +++++++++++++++++++ test/files/neg/t5589neg.scala | 28 ++++++++++++++++++ test/files/pos/t1336.scala | 10 +++++++ test/files/pos/t5589.scala | 22 ++++++++++++++ 6 files changed, 124 insertions(+), 20 deletions(-) create mode 100644 test/files/neg/t5589neg.check create mode 100644 test/files/neg/t5589neg.scala create mode 100644 test/files/pos/t1336.scala create mode 100644 test/files/pos/t5589.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala index 769d7a9ed1..ce3de94335 100644 --- a/src/compiler/scala/reflect/internal/TreeInfo.scala +++ b/src/compiler/scala/reflect/internal/TreeInfo.scala @@ -17,7 +17,7 @@ abstract class TreeInfo { val global: SymbolTable import global._ - import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass } + import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass } /* Does not seem to be used. Not sure what it does anyway. def isOwnerDefinition(tree: Tree): Boolean = tree match { @@ -312,6 +312,24 @@ abstract class TreeInfo { case _ => false } + /** Is this tree comprised of nothing but identifiers, + * but possibly in bindings or tuples? For instance + * + * foo @ (bar, (baz, quux)) + * + * is a variable pattern; if the structure matches, + * then the remainder is inevitable. 
+ */ + def isVariablePattern(tree: Tree): Boolean = tree match { + case Bind(name, pat) => isVariablePattern(pat) + case Ident(name) => true + case Apply(sel, args) => + ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName) + && (args forall isVariablePattern) + ) + case _ => false + } + /** Is this argument node of the form : _* ? */ def isWildcardStarArg(tree: Tree): Boolean = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 0d2fbc5372..80c258e456 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -262,29 +262,25 @@ abstract class TreeBuilder { else if (stats.length == 1) stats.head else Block(stats.init, stats.last) + def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = { + val cases = List( + CaseDef(condition, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) + ) + val matchTree = makeVisitor(cases, false, scrutineeName) + + atPos(tree.pos)(Apply(Select(tree, nme.filter), matchTree :: Nil)) + } + /** Create tree for for-comprehension generator */ def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = { val pat1 = patvarTransformer.transform(pat) val rhs1 = - if (valeq) rhs - else matchVarPattern(pat1) match { - case Some(_) => - rhs - case None => - atPos(rhs.pos) { - Apply( - Select(rhs, nme.filter), - List( - makeVisitor( - List( - CaseDef(pat1.duplicate, EmptyTree, Literal(Constant(true))), - CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))), - false, - nme.CHECK_IF_REFUTABLE_STRING - ))) - } - } - if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1) + if (valeq || treeInfo.isVariablePattern(pat)) rhs + else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING) + + if (valeq) ValEq(pos, pat1, rhs1) + else ValFrom(pos, pat1, rhs1) } def makeParam(pname: TermName, tpe: Tree) = diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check new file mode 100644 index 0000000000..e75fd2f4f7 --- /dev/null +++ b/test/files/neg/t5589neg.check @@ -0,0 +1,30 @@ +t5589neg.scala:24: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:25: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:26: error: constructor cannot be instantiated to expected type; + found : (T1,) + required: (String, Int) + def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:26: error: not found: value y2 + def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:27: error: constructor cannot be instantiated to expected type; + found : (T1, T2, T3) + required: (String, Int) + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:27: error: not found: value y1 + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:27: error: not found: value y2 + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +7 errors found diff 
--git a/test/files/neg/t5589neg.scala b/test/files/neg/t5589neg.scala new file mode 100644 index 0000000000..ddd382d8d8 --- /dev/null +++ b/test/files/neg/t5589neg.scala @@ -0,0 +1,28 @@ +class A { + // First three compile. + def f1(x: Either[Int, String]) = x.right map (y => y) + def f2(x: Either[Int, String]) = for (y <- x.right) yield y + def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) } + // Last one fails. + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) +/** +./a.scala:5: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: Either[Nothing,(String, Int)] + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y1 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y2 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +three errors found +**/ + + + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) +} diff --git a/test/files/pos/t1336.scala b/test/files/pos/t1336.scala new file mode 100644 index 0000000000..63967985c7 --- /dev/null +++ b/test/files/pos/t1336.scala @@ -0,0 +1,10 @@ +object Foo { + def foreach( f : ((Int,Int)) => Unit ) { + println("foreach") + f(1,2) + } + + for( (a,b) <- this ) { + println((a,b)) + } +} diff --git a/test/files/pos/t5589.scala b/test/files/pos/t5589.scala new file mode 100644 index 0000000000..69cbb20391 --- /dev/null +++ b/test/files/pos/t5589.scala @@ -0,0 +1,22 @@ +class A { + // First three compile. + def f1(x: Either[Int, String]) = x.right map (y => y) + def f2(x: Either[Int, String]) = for (y <- x.right) yield y + def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) } + // Last one fails. + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) +/** +./a.scala:5: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: Either[Nothing,(String, Int)] + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y1 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y2 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +three errors found +**/ +} -- cgit v1.2.3 From 365bb2b4e3ac880243736bf039b649a63b00ccb2 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Mar 2012 19:44:58 -0700 Subject: Discovered filter was still being generated. Rather than withFilter, for a subset of for comprehension structures. Not sure if this was somewhat by design - only seems possible because refchecks was only looking for nme.filter, not nme.withFilter, so perhaps this was intended as some secret irrefutability backchannel? Really have to document that sort of thing if it's intentional. I assumed it wasn't and unified everything. 
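Illustrative sketch, not part of the patch series: the two irrefutability commits above change how tuple patterns in for comprehensions are desugared. An irrefutable pattern should translate directly to `map`, without the intermediate `filter`/`withFilter` predicate that used to be generated and could fail to typecheck (SI-5589, SI-1336). Below is a minimal, self-contained Scala sketch of the difference; the object name is invented for illustration and the "old" desugaring is only approximated.

    object IrrefutabilitySketch {
      def main(args: Array[String]): Unit = {
        val xs = List((1, 2), (3, 4))

        // Desired desugaring of `for ((a, b) <- xs) yield a + b`:
        // the tuple pattern is irrefutable, so no filtering step is needed.
        val direct = xs.map { case (a, b) => a + b }

        // Roughly what the old desugaring produced: a redundant
        // pattern-match predicate inserted before the map.
        val oldStyle = xs
          .withFilter { case (a, b) => true }
          .map { case (a, b) => a + b }

        println(direct)   // List(3, 7)
        println(oldStyle) // List(3, 7)
      }
    }
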
--- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/pos/irrefutable.scala | 22 ++++++++++++++++++++++ 3 files changed, 24 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/irrefutable.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 80c258e456..0bc88d1efd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -269,7 +269,7 @@ abstract class TreeBuilder { ) val matchTree = makeVisitor(cases, false, scrutineeName) - atPos(tree.pos)(Apply(Select(tree, nme.filter), matchTree :: Nil)) + atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil)) } /** Create tree for for-comprehension generator */ diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ec42d251ff..73369f09af 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1445,7 +1445,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R private def transformApply(tree: Apply): Tree = tree match { case Apply( - Select(qual, nme.filter), + Select(qual, nme.filter | nme.withFilter), List(Function( List(ValDef(_, pname, tpt, _)), Match(_, CaseDef(pat1, _, _) :: _)))) diff --git a/test/files/pos/irrefutable.scala b/test/files/pos/irrefutable.scala new file mode 100644 index 0000000000..0a792b644a --- /dev/null +++ b/test/files/pos/irrefutable.scala @@ -0,0 +1,22 @@ +// The test which this should perform but does not +// is that f1 is recognized as irrefutable and f2 is not +// This can be recognized via the generated classes: +// +// A$$anonfun$f1$1.class +// A$$anonfun$f2$1.class +// A$$anonfun$f2$2.class +// +// The extra one in $f2$ is the filter. +// +// !!! Marking with exclamation points so maybe someday +// this test will be finished. +class A { + case class Foo[T](x: T) + + def f1(xs: List[Foo[Int]]) = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: List[Foo[Any]]) = { + for (Foo(x: Int) <- xs) yield x + } +} -- cgit v1.2.3 From 032b209125585011194e6195f1244b882b5b4d8f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Mar 2012 20:10:29 -0700 Subject: Spiced up the irrefutability tests a bit. 
--- test/files/neg/t5589neg.check | 21 ++++++++++++++------- test/files/neg/t5589neg.scala | 22 ---------------------- test/files/neg/t5589neg2.check | 9 +++++++++ test/files/neg/t5589neg2.scala | 13 +++++++++++++ 4 files changed, 36 insertions(+), 29 deletions(-) create mode 100644 test/files/neg/t5589neg2.check create mode 100644 test/files/neg/t5589neg2.scala (limited to 'test/files') diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check index e75fd2f4f7..b3ff16d7e4 100644 --- a/test/files/neg/t5589neg.check +++ b/test/files/neg/t5589neg.check @@ -1,30 +1,37 @@ -t5589neg.scala:24: error: constructor cannot be instantiated to expected type; +t5589neg.scala:2: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:2: error: constructor cannot be instantiated to expected type; found : (T1, T2) required: String def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:25: error: constructor cannot be instantiated to expected type; +t5589neg.scala:3: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:3: error: constructor cannot be instantiated to expected type; found : (T1, T2) required: String def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:26: error: constructor cannot be instantiated to expected type; +t5589neg.scala:4: error: constructor cannot be instantiated to expected type; found : (T1,) required: (String, Int) def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:26: error: not found: value y2 +t5589neg.scala:4: error: not found: value y2 def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:27: error: constructor cannot be instantiated to expected type; +t5589neg.scala:5: error: constructor cannot be instantiated to expected type; found : (T1, T2, T3) required: (String, Int) def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:27: error: not found: value y1 +t5589neg.scala:5: error: not found: value y1 def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) ^ -t5589neg.scala:27: error: not found: value y2 +t5589neg.scala:5: error: not found: value y2 def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) ^ +two warnings found 7 errors found diff --git a/test/files/neg/t5589neg.scala b/test/files/neg/t5589neg.scala index ddd382d8d8..31ff2c3693 100644 --- a/test/files/neg/t5589neg.scala +++ b/test/files/neg/t5589neg.scala @@ -1,26 +1,4 @@ class A { - // First three compile. - def f1(x: Either[Int, String]) = x.right map (y => y) - def f2(x: Either[Int, String]) = for (y <- x.right) yield y - def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) } - // Last one fails. 
- def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) -/** -./a.scala:5: error: constructor cannot be instantiated to expected type; - found : (T1, T2) - required: Either[Nothing,(String, Int)] - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -./a.scala:5: error: not found: value y1 - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -./a.scala:5: error: not found: value y2 - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -three errors found -**/ - - def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) diff --git a/test/files/neg/t5589neg2.check b/test/files/neg/t5589neg2.check new file mode 100644 index 0000000000..6af4955a83 --- /dev/null +++ b/test/files/neg/t5589neg2.check @@ -0,0 +1,9 @@ +t5589neg2.scala:7: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + ^ +t5589neg2.scala:7: error: not found: value d + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + ^ +two errors found diff --git a/test/files/neg/t5589neg2.scala b/test/files/neg/t5589neg2.scala new file mode 100644 index 0000000000..b7c7ab7218 --- /dev/null +++ b/test/files/neg/t5589neg2.scala @@ -0,0 +1,13 @@ +class A { + def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok + } + + def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + } + + def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok + } +} \ No newline at end of file -- cgit v1.2.3 From fb44bb28b8b3e7861b96c874dc79072f89fec10b Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Mon, 19 Mar 2012 20:22:25 -0700 Subject: Test cases closes SI-4574. Looks like I got that irrefutability bug too. --- test/files/run/t4574.check | 2 ++ test/files/run/t4574.scala | 13 +++++++++++++ 2 files changed, 15 insertions(+) create mode 100644 test/files/run/t4574.check create mode 100644 test/files/run/t4574.scala (limited to 'test/files') diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check new file mode 100644 index 0000000000..a4522fff24 --- /dev/null +++ b/test/files/run/t4574.check @@ -0,0 +1,2 @@ +I hereby refute null! +I denounce null as unListLike! 
diff --git a/test/files/run/t4574.scala b/test/files/run/t4574.scala new file mode 100644 index 0000000000..1dde496aca --- /dev/null +++ b/test/files/run/t4574.scala @@ -0,0 +1,13 @@ +object Test { + val xs: List[(Int, Int)] = List((2, 2), null) + + def expectMatchError[T](msg: String)(body: => T) { + try { body ; assert(false, "Should not succeed.") } + catch { case _: MatchError => println(msg) } + } + + def main(args: Array[String]): Unit = { + expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x ) + expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } ) + } +} -- cgit v1.2.3 From c57699bc0214a3fef40bf5a656b569799b2365ea Mon Sep 17 00:00:00 2001 From: Vojin Jovanovic Date: Tue, 20 Mar 2012 17:51:27 +0100 Subject: Cleaned up Duration for the standard library. - Removed Timeout from the library. Each library should provide its own implementation of Timeout - Removed duration package object and replaced it with regular object - Removed usages of post Java 1.5 TimeUnit members - Added factory methods for FiniteDuration - Cleaned up some unnecessary comments - Merged duration DSL with Duration.scala file Review by: @phaller --- src/library/scala/concurrent/Awaitable.scala | 2 +- .../scala/concurrent/ConcurrentPackageObject.scala | 7 +- .../scala/concurrent/ExecutionContext.scala | 2 +- src/library/scala/concurrent/Future.scala | 3 +- src/library/scala/concurrent/Scheduler.scala | 2 +- .../default/SchedulerImpl.scala.disabled | 2 +- .../concurrent/default/TaskImpl.scala.disabled | 2 +- .../concurrent/impl/ExecutionContextImpl.scala | 3 +- src/library/scala/concurrent/impl/Promise.scala | 2 +- src/library/scala/concurrent/package.scala | 3 +- src/library/scala/concurrent/util/Duration.scala | 578 +++++++++++++++++++++ src/library/scala/util/Duration.scala | 485 ----------------- src/library/scala/util/Timeout.scala | 33 -- test/files/jvm/scala-concurrent-tck.scala | 2 +- 14 files changed, 595 insertions(+), 531 deletions(-) create mode 100644 src/library/scala/concurrent/util/Duration.scala delete mode 100644 src/library/scala/util/Duration.scala delete mode 100644 src/library/scala/util/Timeout.scala (limited to 'test/files') diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index c38e668f30..6c9995eb05 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -11,7 +11,7 @@ package scala.concurrent import scala.annotation.implicitNotFound -import scala.util.Duration +import scala.concurrent.util.Duration diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala index 7d005838d3..3471095051 100644 --- a/src/library/scala/concurrent/ConcurrentPackageObject.scala +++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala @@ -12,7 +12,8 @@ package scala.concurrent import java.util.concurrent.{ Executors, ExecutorService } import scala.concurrent.forkjoin.ForkJoinPool -import scala.util.{ Duration, Try, Success, Failure } +import scala.util.{ Try, Success, Failure } +import scala.concurrent.util.Duration import ConcurrentPackageObject._ @@ -26,8 +27,8 @@ abstract class ConcurrentPackageObject { new impl.ExecutionContextImpl(getExecutorService) private[concurrent] def getExecutorService: AnyRef = - if (util.Properties.isJavaAtLeast("1.6")) { - val vendor = util.Properties.javaVmVendor + if (scala.util.Properties.isJavaAtLeast("1.6")) { + val vendor = 
scala.util.Properties.javaVmVendor if ((vendor contains "Oracle") || (vendor contains "Sun") || (vendor contains "Apple")) new ForkJoinPool else Executors.newCachedThreadPool() } else Executors.newCachedThreadPool() diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index eb1b3355c0..c4a45f9fb5 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -12,7 +12,7 @@ package scala.concurrent import java.util.concurrent.atomic.{ AtomicInteger } import java.util.concurrent.{ Executors, Future => JFuture, Callable } -import scala.util.Duration +import scala.concurrent.util.Duration import scala.util.{ Try, Success, Failure } import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveTask => FJTask, RecursiveAction, ForkJoinWorkerThread } import scala.collection.generic.CanBuildFrom diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index eb54b61db0..1dc8e38355 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -17,7 +17,8 @@ import java.util.{ LinkedList => JLinkedList } import java.{ lang => jl } import java.util.concurrent.atomic.{ AtomicReferenceFieldUpdater, AtomicInteger, AtomicBoolean } -import scala.util.{ Timeout, Duration, Try, Success, Failure } +import scala.util.{ Try, Success, Failure } +import scala.concurrent.util.Duration import scala.Option import scala.annotation.tailrec diff --git a/src/library/scala/concurrent/Scheduler.scala b/src/library/scala/concurrent/Scheduler.scala index 39d798e6b4..e2eb4d69fe 100644 --- a/src/library/scala/concurrent/Scheduler.scala +++ b/src/library/scala/concurrent/Scheduler.scala @@ -8,7 +8,7 @@ package scala.concurrent -import scala.util.Duration +import scala.concurrent.util.Duration /** A service for scheduling tasks and thunks for one-time, or periodic execution. 
*/ diff --git a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled index 745d2d1a15..241efa8857 100644 --- a/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled +++ b/src/library/scala/concurrent/default/SchedulerImpl.scala.disabled @@ -9,7 +9,7 @@ package scala.concurrent package default -import scala.util.Duration +import scala.concurrent.util.Duration private[concurrent] final class SchedulerImpl extends Scheduler { private val timer = diff --git a/src/library/scala/concurrent/default/TaskImpl.scala.disabled b/src/library/scala/concurrent/default/TaskImpl.scala.disabled index 94e54cb372..50753a7154 100644 --- a/src/library/scala/concurrent/default/TaskImpl.scala.disabled +++ b/src/library/scala/concurrent/default/TaskImpl.scala.disabled @@ -7,7 +7,7 @@ import java.util.concurrent.atomic.AtomicReferenceFieldUpdater import scala.concurrent.forkjoin.{ ForkJoinPool, RecursiveAction, ForkJoinWorkerThread } import scala.util.Try import scala.util -import scala.util.Duration +import scala.concurrent.util.Duration import scala.annotation.tailrec diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 7984aa02b7..dd1de2025e 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -13,7 +13,8 @@ package scala.concurrent.impl import java.util.concurrent.{Callable, ExecutorService} import scala.concurrent.forkjoin._ import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable} -import scala.util.{ Duration, Try, Success, Failure } +import scala.util.{ Try, Success, Failure } +import scala.concurrent.util.{ Duration } import scala.collection.mutable.Stack diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 0087b71ea8..3d40ae6930 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -14,7 +14,7 @@ import java.util.concurrent.TimeUnit.{ NANOSECONDS, MILLISECONDS } import java.util.concurrent.atomic.AtomicReferenceFieldUpdater import scala.concurrent.{Awaitable, ExecutionContext, resolve, resolver, blocking, CanAwait, TimeoutException} //import scala.util.continuations._ -import scala.util.Duration +import scala.concurrent.util.Duration import scala.util.Try import scala.util import scala.annotation.tailrec diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 7cc48c09b2..204b3f2673 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -8,7 +8,8 @@ package scala -import scala.util.{ Duration, Try, Success, Failure } +import scala.util.{ Try, Success, Failure } +import scala.concurrent.util.Duration /** This package object contains primitives for concurrent and parallel programming. */ diff --git a/src/library/scala/concurrent/util/Duration.scala b/src/library/scala/concurrent/util/Duration.scala new file mode 100644 index 0000000000..33d034da76 --- /dev/null +++ b/src/library/scala/concurrent/util/Duration.scala @@ -0,0 +1,578 @@ +/** + * Copyright (C) 2009-2012 Typesafe Inc. 
+ */ + +package scala.concurrent.util + +import java.util.concurrent.TimeUnit +import TimeUnit._ +import java.lang.{ Double ⇒ JDouble } + +object DurationImplicits { + trait Classifier[C] { + type R + def convert(d: FiniteDuration): R + } + + object span + implicit object spanConvert extends Classifier[span.type] { + type R = FiniteDuration + def convert(d: FiniteDuration) = d + } + + object fromNow + implicit object fromNowConvert extends Classifier[fromNow.type] { + type R = Deadline + def convert(d: FiniteDuration) = Deadline.now + d + } + + implicit def intToDurationInt(n: Int) = new DurationInt(n) + implicit def longToDurationLong(n: Long) = new DurationLong(n) + implicit def doubleToDurationDouble(d: Double) = new DurationDouble(d) + + implicit def pairIntToDuration(p: (Int, TimeUnit)) = Duration(p._1, p._2) + implicit def pairLongToDuration(p: (Long, TimeUnit)) = Duration(p._1, p._2) + implicit def durationToPair(d: Duration) = (d.length, d.unit) + + /* + * Avoid reflection based invocation by using non-duck type + */ + class IntMult(i: Int) { + def *(d: Duration) = d * i + } + implicit def intMult(i: Int) = new IntMult(i) + + class LongMult(l: Long) { + def *(d: Duration) = d * l + } + implicit def longMult(l: Long) = new LongMult(l) + + class DoubleMult(f: Double) { + def *(d: Duration) = d * f + } + implicit def doubleMult(f: Double) = new DoubleMult(f) +} + +case class Deadline private (time: Duration) { + def +(other: Duration): Deadline = copy(time = time + other) + def -(other: Duration): Deadline = copy(time = time - other) + def -(other: Deadline): Duration = time - other.time + def timeLeft: Duration = this - Deadline.now + def hasTimeLeft(): Boolean = !isOverdue() + def isOverdue(): Boolean = (time.toNanos - System.nanoTime()) < 0 +} + +object Deadline { + def now: Deadline = Deadline(Duration(System.nanoTime, NANOSECONDS)) +} + +object Duration { + implicit def timeLeft(implicit d: Deadline): Duration = d.timeLeft + + def apply(length: Long, unit: TimeUnit): FiniteDuration = new FiniteDuration(length, unit) + def apply(length: Double, unit: TimeUnit): FiniteDuration = fromNanos(unit.toNanos(1) * length) + def apply(length: Long, unit: String): FiniteDuration = { + val (mult, timeUnit) = Duration.timeUnit(unit) + new FiniteDuration(length * mult, timeUnit) + } + + /** + * Construct a Duration by parsing a String. In case of a format error, a + * RuntimeException is thrown. See `unapply(String)` for more information. + */ + def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error " + s) + + private val RE = ("""^\s*([\+|-]?\d+(?:\.\d+)?)\s*""" + // length part + "(?:" + // units are distinguished in separate match groups + "(d|day|days)|" + + "(h|hour|hours)|" + + "(min|minute|minutes)|" + + "(s|sec|second|seconds)|" + + "(ms|milli|millis|millisecond|milliseconds)|" + + "(µs|micro|micros|microsecond|microseconds)|" + + "(ns|nano|nanos|nanosecond|nanoseconds)" + + """)\s*$""").r // close the non-capturing group + private val REinf = """^\s*(?:\+|Plus)?Inf\s*$""".r + private val REminf = """^\s*(?:-|Minus)Inf\s*""".r + + /** + * Deconstruct a Duration into `Long` length and [[java.util.concurrent.TimeUnit]] if it is a + * [[scala.util.concurrent.FiniteDuration]]. + * + * @param d Duration to be deconstructed. + */ + def unapply(d: Duration): Option[(Long, TimeUnit)] = { + if (d.finite_?) { + Some((d.length, d.unit)) + } else { + None + } + } + + /** + * Parse String, return None if no match. 
Format is `""`, where + * whitespace is allowed before, between and after the parts. Infinities are + * designated by `"Inf"`, `"PlusInf"`, `"+Inf"` and `"-Inf"` or `"MinusInf"`. + */ + def unapply(s: String): Option[Duration] = s match { + case RE(length, d, h, m, s, ms, mus, ns) ⇒ + if (d ne null) + Some(Duration(JDouble.parseDouble(length) * 86400, SECONDS)) + else if (h ne null) + Some(Duration(JDouble.parseDouble(length) * 3600, SECONDS)) + else if (m ne null) + Some(Duration(JDouble.parseDouble(length) * 60, SECONDS)) + else if (s ne null) + Some(Duration(JDouble.parseDouble(length), SECONDS)) + else if (ms ne null) + Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) + else if (mus ne null) + Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) + else if (ns ne null) + Some(Duration(JDouble.parseDouble(length), NANOSECONDS)) + else + sys.error("made some error in regex (should not be possible)") + case REinf() ⇒ Some(Inf) + case REminf() ⇒ Some(MinusInf) + case _ ⇒ None + } + + def fromNanos(nanos: Double): FiniteDuration = + fromNanos((nanos + 0.5).asInstanceOf[Long]) + + def fromNanos(nanos: Long): FiniteDuration = { + if (nanos % 86400000000000L == 0) { + Duration(nanos / 1000000000L, SECONDS) + } else if (nanos % 1000000000L == 0) { + Duration(nanos / 1000000000L, SECONDS) + } else if (nanos % 1000000000L == 0) { + Duration(nanos / 1000000000L, SECONDS) + } else if (nanos % 1000000000L == 0) { + Duration(nanos / 1000000000L, SECONDS) + } else if (nanos % 1000000L == 0) { + Duration(nanos / 1000000L, MILLISECONDS) + } else if (nanos % 1000L == 0) { + Duration(nanos / 1000L, MICROSECONDS) + } else { + Duration(nanos, NANOSECONDS) + } + } + + /** + * Parse TimeUnit from string representation. + */ + protected[util] def timeUnit(unit: String): (Long, TimeUnit) = unit.toLowerCase match { + case "d" | "day" | "days" ⇒ (86400, SECONDS) + case "h" | "hour" | "hours" ⇒ (3600, SECONDS) + case "min" | "minute" | "minutes" ⇒ (60, SECONDS) + case "s" | "sec" | "second" | "seconds" ⇒ (1, SECONDS) + case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ (1, MILLISECONDS) + case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ (1, MICROSECONDS) + case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ (1, NANOSECONDS) + } + + val Zero: FiniteDuration = new FiniteDuration(0, NANOSECONDS) + val Undefined: Duration = new Duration with Infinite { + override def toString = "Duration.Undefined" + override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this + override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration") + override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration") + override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration") + override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration") + override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration") + def compare(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration") + def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration") + } + + trait Infinite { + this: Duration ⇒ + + def +(other: Duration): Duration = + other match { + case _: this.type ⇒ this + case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities") + case _ ⇒ 
this + } + def -(other: Duration): Duration = + other match { + case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities") + case _ ⇒ this + } + def *(factor: Double): Duration = this + def /(factor: Double): Duration = this + def /(other: Duration): Double = + other match { + case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities") + // maybe questionable but pragmatic: Inf / 0 => Inf + case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1) + } + + def finite_? = false + + def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations") + def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations") + def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations") + def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations") + def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations") + def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations") + def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations") + def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations") + def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations") + def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations") + + } + + /** + * Infinite duration: greater than any other and not equal to any other, + * including itself. + */ + val Inf: Duration = new Duration with Infinite { + override def toString = "Duration.Inf" + def compare(other: Duration) = if (other eq this) 0 else 1 + def unary_- : Duration = MinusInf + } + + /** + * Infinite negative duration: lesser than any other and not equal to any other, + * including itself. + */ + val MinusInf: Duration = new Duration with Infinite { + override def toString = "Duration.MinusInf" + def compare(other: Duration) = if (other eq this) 0 else -1 + def unary_- : Duration = Inf + } + + // Java Factories + def create(length: Long, unit: TimeUnit): FiniteDuration = apply(length, unit) + def create(length: Double, unit: TimeUnit): FiniteDuration = apply(length, unit) + def create(length: Long, unit: String): FiniteDuration = apply(length, unit) + def parse(s: String): Duration = unapply(s).get + + implicit object DurationIsOrdered extends Ordering[Duration] { + def compare(a: Duration, b: Duration) = a compare b + } +} + +/** + * Utility for working with java.util.concurrent.TimeUnit durations. + * + *

+ * Examples: + *

+ * import scala.concurrent.util.Duration
+ * import java.util.concurrent.TimeUnit
+ *
+ * val duration = Duration(100, MILLISECONDS)
+ * val duration = Duration(100, "millis")
+ *
+ * duration.toNanos
+ * duration < 1.second
+ * duration <= Duration.Inf
+ * 
+ * + *

+ * Implicits are also provided for Int, Long and Double. Example usage: + *

+ * import scala.concurrent.util.Duration._
+ *
+ * val duration = 100 millis
+ * 
+ * + * Extractors, parsing and arithmetic are also included: + *
+ * val d = Duration("1.2 µs")
+ * val Duration(length, unit) = 5 millis
+ * val d2 = d * 2.5
+ * val d3 = d2 + 1.millisecond
+ * 
+ */ +abstract class Duration extends Serializable with Ordered[Duration] { + def length: Long + def unit: TimeUnit + def toNanos: Long + def toMicros: Long + def toMillis: Long + def toSeconds: Long + def toMinutes: Long + def toHours: Long + def toDays: Long + def toUnit(unit: TimeUnit): Double + + def +(other: Duration): Duration + def -(other: Duration): Duration + def *(factor: Double): Duration + def /(factor: Double): Duration + def /(other: Duration): Double + def unary_- : Duration + def finite_? : Boolean + def min(other: Duration): Duration = if (this < other) this else other + def max(other: Duration): Duration = if (this > other) this else other + def fromNow: Deadline = Deadline.now + this + + // Java API + def lt(other: Duration) = this < other + def lteq(other: Duration) = this <= other + def gt(other: Duration) = this > other + def gteq(other: Duration) = this >= other + def plus(other: Duration) = this + other + def minus(other: Duration) = this - other + def mul(factor: Double) = this * factor + def div(factor: Double) = this / factor + def div(other: Duration) = this / other + def neg() = -this + def isFinite() = finite_? +} + +object FiniteDuration { + implicit object FiniteDurationIsOrdered extends Ordering[FiniteDuration] { + def compare(a: FiniteDuration, b: FiniteDuration) = a compare b + } + + def apply(length: Long, unit: TimeUnit) = + new FiniteDuration(length, unit) + + def apply(length: Long, unit: String) = { + val (mult, timeUnit) = Duration.timeUnit(unit) + new FiniteDuration(length * mult, timeUnit) + } + +} + +class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration { + import Duration._ + + def toNanos = unit.toNanos(length) + def toMicros = unit.toMicros(length) + def toMillis = unit.toMillis(length) + def toSeconds = unit.toSeconds(length) + def toMinutes = unit.toMinutes(length) + def toHours = unit.toHours(length) + def toDays = unit.toDays(length) + def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u) + + override def toString = this match { + case Duration(1, SECONDS) ⇒ "1 second" + case Duration(x, SECONDS) ⇒ x + " seconds" + case Duration(1, MILLISECONDS) ⇒ "1 millisecond" + case Duration(x, MILLISECONDS) ⇒ x + " milliseconds" + case Duration(1, MICROSECONDS) ⇒ "1 microsecond" + case Duration(x, MICROSECONDS) ⇒ x + " microseconds" + case Duration(1, NANOSECONDS) ⇒ "1 nanosecond" + case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds" + } + + def compare(other: Duration) = + if (other.finite_?) { + val me = toNanos + val o = other.toNanos + if (me > o) 1 else if (me < o) -1 else 0 + } else -other.compare(this) + + def +(other: Duration) = { + if (!other.finite_?) { + other + } else { + val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos + fromNanos(nanos) + } + } + + def -(other: Duration) = { + if (!other.finite_?) { + other + } else { + val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos + fromNanos(nanos) + } + } + + def *(factor: Double) = fromNanos(long2double(toNanos) * factor) + + def /(factor: Double) = fromNanos(long2double(toNanos) / factor) + + def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0 + + def unary_- = Duration(-length, unit) + + def finite_? 
= true + + override def equals(other: Any) = + other.isInstanceOf[FiniteDuration] && + toNanos == other.asInstanceOf[FiniteDuration].toNanos + + override def hashCode = toNanos.asInstanceOf[Int] +} + +class DurationInt(n: Int) { + import DurationImplicits.Classifier + + def nanoseconds = Duration(n, NANOSECONDS) + def nanos = Duration(n, NANOSECONDS) + def nanosecond = Duration(n, NANOSECONDS) + def nano = Duration(n, NANOSECONDS) + + def microseconds = Duration(n, MICROSECONDS) + def micros = Duration(n, MICROSECONDS) + def microsecond = Duration(n, MICROSECONDS) + def micro = Duration(n, MICROSECONDS) + + def milliseconds = Duration(n, MILLISECONDS) + def millis = Duration(n, MILLISECONDS) + def millisecond = Duration(n, MILLISECONDS) + def milli = Duration(n, MILLISECONDS) + + def seconds = Duration(n, SECONDS) + def second = Duration(n, SECONDS) + + def minutes = Duration(n * 60, SECONDS) + def minute = Duration(n * 60, SECONDS) + + def hours = Duration(n * 3600, SECONDS) + def hour = Duration(n * 3600, SECONDS) + + def days = Duration(n * 86400, SECONDS) + def day = Duration(n * 86400, SECONDS) + + def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + + def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + + def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + + def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS)) + def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS)) + + def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS)) + def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS)) + + def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS)) + def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS)) + + def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS)) + def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS)) +} + +class DurationLong(n: Long) { + import DurationImplicits.Classifier + + def nanoseconds = Duration(n, NANOSECONDS) + def nanos = Duration(n, NANOSECONDS) + def nanosecond = Duration(n, NANOSECONDS) + def nano = Duration(n, NANOSECONDS) + + def microseconds = Duration(n, 
MICROSECONDS) + def micros = Duration(n, MICROSECONDS) + def microsecond = Duration(n, MICROSECONDS) + def micro = Duration(n, MICROSECONDS) + + def milliseconds = Duration(n, MILLISECONDS) + def millis = Duration(n, MILLISECONDS) + def millisecond = Duration(n, MILLISECONDS) + def milli = Duration(n, MILLISECONDS) + + def seconds = Duration(n, SECONDS) + def second = Duration(n, SECONDS) + + def minutes = Duration(n * 60, SECONDS) + def minute = Duration(n * 60, SECONDS) + + def hours = Duration(n * 3600, SECONDS) + def hour = Duration(n * 3600, SECONDS) + + def days = Duration(n * 86400, SECONDS) + def day = Duration(n * 86400, SECONDS) + + def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, NANOSECONDS)) + + def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MICROSECONDS)) + + def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, MILLISECONDS)) + + def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS)) + def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n, SECONDS)) + + def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS)) + def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 60, SECONDS)) + + def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS)) + def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 3600, SECONDS)) + + def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS)) + def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(n * 86400, SECONDS)) +} + +class DurationDouble(d: Double) { + import DurationImplicits.Classifier + + def nanoseconds = Duration(d, NANOSECONDS) + def nanos = Duration(d, NANOSECONDS) + def nanosecond = Duration(d, NANOSECONDS) + def nano = Duration(d, NANOSECONDS) + + def microseconds = Duration(d, MICROSECONDS) + def micros = Duration(d, MICROSECONDS) + def microsecond = Duration(d, MICROSECONDS) + def micro = Duration(d, MICROSECONDS) + + def milliseconds = Duration(d, MILLISECONDS) + def millis = Duration(d, MILLISECONDS) + def millisecond = Duration(d, MILLISECONDS) + def milli = Duration(d, MILLISECONDS) + + def seconds = Duration(d, SECONDS) + def second = Duration(d, SECONDS) + + def minutes = Duration(d * 60, SECONDS) + def minute = Duration(d * 60, SECONDS) 
+ + def hours = Duration(d * 3600, SECONDS) + def hour = Duration(d * 3600, SECONDS) + + def days = Duration(d * 86400, SECONDS) + def day = Duration(d * 86400, SECONDS) + + def nanoseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS)) + def nanos[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS)) + def nanosecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS)) + def nano[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, NANOSECONDS)) + + def microseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS)) + def micros[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS)) + def microsecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS)) + def micro[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MICROSECONDS)) + + def milliseconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS)) + def millis[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS)) + def millisecond[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS)) + def milli[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, MILLISECONDS)) + + def seconds[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS)) + def second[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d, SECONDS)) + + def minutes[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS)) + def minute[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 60, SECONDS)) + + def hours[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS)) + def hour[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 3600, SECONDS)) + + def days[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS)) + def day[C, CC <: Classifier[C]](c: C)(implicit ev: CC): CC#R = ev.convert(Duration(d * 86400, SECONDS)) +} diff --git a/src/library/scala/util/Duration.scala b/src/library/scala/util/Duration.scala deleted file mode 100644 index 4c118f8b3b..0000000000 --- a/src/library/scala/util/Duration.scala +++ /dev/null @@ -1,485 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. - */ - -package scala.util - -import java.util.concurrent.TimeUnit -import TimeUnit._ -import java.lang.{ Long ⇒ JLong, Double ⇒ JDouble } -//import akka.actor.ActorSystem (commented methods) - -class TimerException(message: String) extends RuntimeException(message) - -/** - * Simple timer class. - * Usage: - *
- *   import akka.util.duration._
- *   import akka.util.Timer
- *
- *   val timer = Timer(30.seconds)
- *   while (timer.isTicking) { ... }
- * 
- */ -case class Timer(duration: Duration, throwExceptionOnTimeout: Boolean = false) { - val startTimeInMillis = System.currentTimeMillis - val timeoutInMillis = duration.toMillis - - /** - * Returns true while the timer is ticking. After that it either throws and exception or - * returns false. Depending on if the 'throwExceptionOnTimeout' argument is true or false. - */ - def isTicking: Boolean = { - if (!(timeoutInMillis > (System.currentTimeMillis - startTimeInMillis))) { - if (throwExceptionOnTimeout) throw new TimerException("Time out after " + duration) - else false - } else true - } -} - -object Duration { - def apply(length: Long, unit: TimeUnit): Duration = new FiniteDuration(length, unit) - def apply(length: Double, unit: TimeUnit): Duration = fromNanos(unit.toNanos(1) * length) - def apply(length: Long, unit: String): Duration = new FiniteDuration(length, timeUnit(unit)) - - def fromNanos(nanos: Long): Duration = { - if (nanos % 86400000000000L == 0) { - Duration(nanos / 86400000000000L, DAYS) - } else if (nanos % 3600000000000L == 0) { - Duration(nanos / 3600000000000L, HOURS) - } else if (nanos % 60000000000L == 0) { - Duration(nanos / 60000000000L, MINUTES) - } else if (nanos % 1000000000L == 0) { - Duration(nanos / 1000000000L, SECONDS) - } else if (nanos % 1000000L == 0) { - Duration(nanos / 1000000L, MILLISECONDS) - } else if (nanos % 1000L == 0) { - Duration(nanos / 1000L, MICROSECONDS) - } else { - Duration(nanos, NANOSECONDS) - } - } - - def fromNanos(nanos: Double): Duration = fromNanos((nanos + 0.5).asInstanceOf[Long]) - - /** - * Construct a Duration by parsing a String. In case of a format error, a - * RuntimeException is thrown. See `unapply(String)` for more information. - */ - def apply(s: String): Duration = unapply(s) getOrElse sys.error("format error") - - /** - * Deconstruct a Duration into length and unit if it is finite. - */ - def unapply(d: Duration): Option[(Long, TimeUnit)] = { - if (d.finite_?) { - Some((d.length, d.unit)) - } else { - None - } - } - - private val RE = ("""^\s*(\d+(?:\.\d+)?)\s*""" + // length part - "(?:" + // units are distinguished in separate match groups - "(d|day|days)|" + - "(h|hour|hours)|" + - "(min|minute|minutes)|" + - "(s|sec|second|seconds)|" + - "(ms|milli|millis|millisecond|milliseconds)|" + - "(µs|micro|micros|microsecond|microseconds)|" + - "(ns|nano|nanos|nanosecond|nanoseconds)" + - """)\s*$""").r // close the non-capturing group - private val REinf = """^\s*Inf\s*$""".r - private val REminf = """^\s*(?:-\s*|Minus)Inf\s*""".r - - /** - * Parse String, return None if no match. Format is `""`, where - * whitespace is allowed before, between and after the parts. Infinities are - * designated by `"Inf"` and `"-Inf"` or `"MinusInf"`. 
- */ - def unapply(s: String): Option[Duration] = s match { - case RE(length, d, h, m, s, ms, mus, ns) ⇒ - if (d ne null) Some(Duration(JDouble.parseDouble(length), DAYS)) else if (h ne null) Some(Duration(JDouble.parseDouble(length), HOURS)) else if (m ne null) Some(Duration(JDouble.parseDouble(length), MINUTES)) else if (s ne null) Some(Duration(JDouble.parseDouble(length), SECONDS)) else if (ms ne null) Some(Duration(JDouble.parseDouble(length), MILLISECONDS)) else if (mus ne null) Some(Duration(JDouble.parseDouble(length), MICROSECONDS)) else if (ns ne null) Some(Duration(JDouble.parseDouble(length), NANOSECONDS)) else - sys.error("made some error in regex (should not be possible)") - case REinf() ⇒ Some(Inf) - case REminf() ⇒ Some(MinusInf) - case _ ⇒ None - } - - /** - * Parse TimeUnit from string representation. - */ - def timeUnit(unit: String) = unit.toLowerCase match { - case "d" | "day" | "days" ⇒ DAYS - case "h" | "hour" | "hours" ⇒ HOURS - case "min" | "minute" | "minutes" ⇒ MINUTES - case "s" | "sec" | "second" | "seconds" ⇒ SECONDS - case "ms" | "milli" | "millis" | "millisecond" | "milliseconds" ⇒ MILLISECONDS - case "µs" | "micro" | "micros" | "microsecond" | "microseconds" ⇒ MICROSECONDS - case "ns" | "nano" | "nanos" | "nanosecond" | "nanoseconds" ⇒ NANOSECONDS - } - - val Zero: Duration = new FiniteDuration(0, NANOSECONDS) - val Undefined: Duration = new Duration with Infinite { - override def toString = "Duration.Undefined" - override def equals(other: Any) = other.asInstanceOf[AnyRef] eq this - override def +(other: Duration): Duration = throw new IllegalArgumentException("cannot add Undefined duration") - override def -(other: Duration): Duration = throw new IllegalArgumentException("cannot subtract Undefined duration") - override def *(factor: Double): Duration = throw new IllegalArgumentException("cannot multiply Undefined duration") - override def /(factor: Double): Duration = throw new IllegalArgumentException("cannot divide Undefined duration") - override def /(other: Duration): Double = throw new IllegalArgumentException("cannot divide Undefined duration") - def >(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration") - def >=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration") - def <(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration") - def <=(other: Duration) = throw new IllegalArgumentException("cannot compare Undefined duration") - def unary_- : Duration = throw new IllegalArgumentException("cannot negate Undefined duration") - } - - trait Infinite { - this: Duration ⇒ - - override def equals(other: Any) = false - - def +(other: Duration): Duration = - other match { - case _: this.type ⇒ this - case _: Infinite ⇒ throw new IllegalArgumentException("illegal addition of infinities") - case _ ⇒ this - } - def -(other: Duration): Duration = - other match { - case _: this.type ⇒ throw new IllegalArgumentException("illegal subtraction of infinities") - case _ ⇒ this - } - def *(factor: Double): Duration = this - def /(factor: Double): Duration = this - def /(other: Duration): Double = - other match { - case _: Infinite ⇒ throw new IllegalArgumentException("illegal division of infinities") - // maybe questionable but pragmatic: Inf / 0 => Inf - case x ⇒ Double.PositiveInfinity * (if ((this > Zero) ^ (other >= Zero)) -1 else 1) - } - - def finite_? 
= false - - def length: Long = throw new IllegalArgumentException("length not allowed on infinite Durations") - def unit: TimeUnit = throw new IllegalArgumentException("unit not allowed on infinite Durations") - def toNanos: Long = throw new IllegalArgumentException("toNanos not allowed on infinite Durations") - def toMicros: Long = throw new IllegalArgumentException("toMicros not allowed on infinite Durations") - def toMillis: Long = throw new IllegalArgumentException("toMillis not allowed on infinite Durations") - def toSeconds: Long = throw new IllegalArgumentException("toSeconds not allowed on infinite Durations") - def toMinutes: Long = throw new IllegalArgumentException("toMinutes not allowed on infinite Durations") - def toHours: Long = throw new IllegalArgumentException("toHours not allowed on infinite Durations") - def toDays: Long = throw new IllegalArgumentException("toDays not allowed on infinite Durations") - def toUnit(unit: TimeUnit): Double = throw new IllegalArgumentException("toUnit not allowed on infinite Durations") - - def printHMS = toString - } - - /** - * Infinite duration: greater than any other and not equal to any other, - * including itself. - */ - val Inf: Duration = new Duration with Infinite { - override def toString = "Duration.Inf" - def >(other: Duration) = true - def >=(other: Duration) = true - def <(other: Duration) = false - def <=(other: Duration) = false - def unary_- : Duration = MinusInf - } - - /** - * Infinite negative duration: lesser than any other and not equal to any other, - * including itself. - */ - val MinusInf: Duration = new Duration with Infinite { - override def toString = "Duration.MinusInf" - def >(other: Duration) = false - def >=(other: Duration) = false - def <(other: Duration) = true - def <=(other: Duration) = true - def unary_- : Duration = Inf - } - - // Java Factories - def create(length: Long, unit: TimeUnit): Duration = apply(length, unit) - def create(length: Double, unit: TimeUnit): Duration = apply(length, unit) - def create(length: Long, unit: String): Duration = apply(length, unit) - def parse(s: String): Duration = unapply(s).get -} - -/** - * Utility for working with java.util.concurrent.TimeUnit durations. - * - *

- * Examples of usage from Java:
- * <pre>
- * import akka.util.FiniteDuration;
- * import java.util.concurrent.TimeUnit;
- *
- * Duration duration = new FiniteDuration(100, MILLISECONDS);
- * Duration duration = new FiniteDuration(5, "seconds");
- *
- * duration.toNanos();
- * </pre>
- *
- * <p/>
- * Examples of usage from Scala:
- * <pre>
- * import akka.util.Duration
- * import java.util.concurrent.TimeUnit
- *
- * val duration = Duration(100, MILLISECONDS)
- * val duration = Duration(100, "millis")
- *
- * duration.toNanos
- * duration < 1.second
- * duration <= Duration.Inf
- * </pre>
- *
- * <p/>
- * Implicits are also provided for Int, Long and Double. Example usage:
- * <pre>
- * import akka.util.duration._
- *
- * val duration = 100 millis
- * </pre>
- *
- * Extractors, parsing and arithmetic are also included:
- * <pre>
- * val d = Duration("1.2 µs")
- * val Duration(length, unit) = 5 millis
- * val d2 = d * 2.5
- * val d3 = d2 + 1.millisecond
- * </pre>
- */ -abstract class Duration extends Serializable { - def length: Long - def unit: TimeUnit - def toNanos: Long - def toMicros: Long - def toMillis: Long - def toSeconds: Long - def toMinutes: Long - def toHours: Long - def toDays: Long - def toUnit(unit: TimeUnit): Double - def printHMS: String - def <(other: Duration): Boolean - def <=(other: Duration): Boolean - def >(other: Duration): Boolean - def >=(other: Duration): Boolean - def +(other: Duration): Duration - def -(other: Duration): Duration - def *(factor: Double): Duration - def /(factor: Double): Duration - def /(other: Duration): Double - def unary_- : Duration - def finite_? : Boolean -// def dilated(implicit system: ActorSystem): Duration = this * system.settings.TestTimeFactor - def min(other: Duration): Duration = if (this < other) this else other - def max(other: Duration): Duration = if (this > other) this else other - def sleep(): Unit = Thread.sleep(toMillis) - - // Java API - def lt(other: Duration) = this < other - def lteq(other: Duration) = this <= other - def gt(other: Duration) = this > other - def gteq(other: Duration) = this >= other - def plus(other: Duration) = this + other - def minus(other: Duration) = this - other - def mul(factor: Double) = this * factor - def div(factor: Double) = this / factor - def div(other: Duration) = this / other - def neg() = -this - def isFinite() = finite_? -} - -class FiniteDuration(val length: Long, val unit: TimeUnit) extends Duration { - import Duration._ - - def this(length: Long, unit: String) = this(length, Duration.timeUnit(unit)) - - def toNanos = unit.toNanos(length) - def toMicros = unit.toMicros(length) - def toMillis = unit.toMillis(length) - def toSeconds = unit.toSeconds(length) - def toMinutes = unit.toMinutes(length) - def toHours = unit.toHours(length) - def toDays = unit.toDays(length) - def toUnit(u: TimeUnit) = long2double(toNanos) / NANOSECONDS.convert(1, u) - - override def toString = this match { - case Duration(1, DAYS) ⇒ "1 day" - case Duration(x, DAYS) ⇒ x + " days" - case Duration(1, HOURS) ⇒ "1 hour" - case Duration(x, HOURS) ⇒ x + " hours" - case Duration(1, MINUTES) ⇒ "1 minute" - case Duration(x, MINUTES) ⇒ x + " minutes" - case Duration(1, SECONDS) ⇒ "1 second" - case Duration(x, SECONDS) ⇒ x + " seconds" - case Duration(1, MILLISECONDS) ⇒ "1 millisecond" - case Duration(x, MILLISECONDS) ⇒ x + " milliseconds" - case Duration(1, MICROSECONDS) ⇒ "1 microsecond" - case Duration(x, MICROSECONDS) ⇒ x + " microseconds" - case Duration(1, NANOSECONDS) ⇒ "1 nanosecond" - case Duration(x, NANOSECONDS) ⇒ x + " nanoseconds" - } - - def printHMS = "%02d:%02d:%06.3f".format(toHours, toMinutes % 60, toMillis / 1000.0 % 60) - - def <(other: Duration) = { - if (other.finite_?) { - toNanos < other.asInstanceOf[FiniteDuration].toNanos - } else { - other > this - } - } - - def <=(other: Duration) = { - if (other.finite_?) { - toNanos <= other.asInstanceOf[FiniteDuration].toNanos - } else { - other >= this - } - } - - def >(other: Duration) = { - if (other.finite_?) { - toNanos > other.asInstanceOf[FiniteDuration].toNanos - } else { - other < this - } - } - - def >=(other: Duration) = { - if (other.finite_?) { - toNanos >= other.asInstanceOf[FiniteDuration].toNanos - } else { - other <= this - } - } - - def +(other: Duration) = { - if (!other.finite_?) { - other - } else { - val nanos = toNanos + other.asInstanceOf[FiniteDuration].toNanos - fromNanos(nanos) - } - } - - def -(other: Duration) = { - if (!other.finite_?) 
{ - other - } else { - val nanos = toNanos - other.asInstanceOf[FiniteDuration].toNanos - fromNanos(nanos) - } - } - - def *(factor: Double) = fromNanos(long2double(toNanos) * factor) - - def /(factor: Double) = fromNanos(long2double(toNanos) / factor) - - def /(other: Duration) = if (other.finite_?) long2double(toNanos) / other.toNanos else 0 - - def unary_- = Duration(-length, unit) - - def finite_? = true - - override def equals(other: Any) = - other.isInstanceOf[FiniteDuration] && - toNanos == other.asInstanceOf[FiniteDuration].toNanos - - override def hashCode = toNanos.asInstanceOf[Int] -} - -class DurationInt(n: Int) { - def nanoseconds = Duration(n, NANOSECONDS) - def nanos = Duration(n, NANOSECONDS) - def nanosecond = Duration(n, NANOSECONDS) - def nano = Duration(n, NANOSECONDS) - - def microseconds = Duration(n, MICROSECONDS) - def micros = Duration(n, MICROSECONDS) - def microsecond = Duration(n, MICROSECONDS) - def micro = Duration(n, MICROSECONDS) - - def milliseconds = Duration(n, MILLISECONDS) - def millis = Duration(n, MILLISECONDS) - def millisecond = Duration(n, MILLISECONDS) - def milli = Duration(n, MILLISECONDS) - - def seconds = Duration(n, SECONDS) - def second = Duration(n, SECONDS) - - def minutes = Duration(n, MINUTES) - def minute = Duration(n, MINUTES) - - def hours = Duration(n, HOURS) - def hour = Duration(n, HOURS) - - def days = Duration(n, DAYS) - def day = Duration(n, DAYS) -} - -class DurationLong(n: Long) { - def nanoseconds = Duration(n, NANOSECONDS) - def nanos = Duration(n, NANOSECONDS) - def nanosecond = Duration(n, NANOSECONDS) - def nano = Duration(n, NANOSECONDS) - - def microseconds = Duration(n, MICROSECONDS) - def micros = Duration(n, MICROSECONDS) - def microsecond = Duration(n, MICROSECONDS) - def micro = Duration(n, MICROSECONDS) - - def milliseconds = Duration(n, MILLISECONDS) - def millis = Duration(n, MILLISECONDS) - def millisecond = Duration(n, MILLISECONDS) - def milli = Duration(n, MILLISECONDS) - - def seconds = Duration(n, SECONDS) - def second = Duration(n, SECONDS) - - def minutes = Duration(n, MINUTES) - def minute = Duration(n, MINUTES) - - def hours = Duration(n, HOURS) - def hour = Duration(n, HOURS) - - def days = Duration(n, DAYS) - def day = Duration(n, DAYS) -} - -class DurationDouble(d: Double) { - def nanoseconds = Duration(d, NANOSECONDS) - def nanos = Duration(d, NANOSECONDS) - def nanosecond = Duration(d, NANOSECONDS) - def nano = Duration(d, NANOSECONDS) - - def microseconds = Duration(d, MICROSECONDS) - def micros = Duration(d, MICROSECONDS) - def microsecond = Duration(d, MICROSECONDS) - def micro = Duration(d, MICROSECONDS) - - def milliseconds = Duration(d, MILLISECONDS) - def millis = Duration(d, MILLISECONDS) - def millisecond = Duration(d, MILLISECONDS) - def milli = Duration(d, MILLISECONDS) - - def seconds = Duration(d, SECONDS) - def second = Duration(d, SECONDS) - - def minutes = Duration(d, MINUTES) - def minute = Duration(d, MINUTES) - - def hours = Duration(d, HOURS) - def hour = Duration(d, HOURS) - - def days = Duration(d, DAYS) - def day = Duration(d, DAYS) -} diff --git a/src/library/scala/util/Timeout.scala b/src/library/scala/util/Timeout.scala deleted file mode 100644 index 0190675344..0000000000 --- a/src/library/scala/util/Timeout.scala +++ /dev/null @@ -1,33 +0,0 @@ -/** - * Copyright (C) 2009-2011 Typesafe Inc. 
- */ -package scala.util - -import java.util.concurrent.TimeUnit - -case class Timeout(duration: Duration) { - def this(timeout: Long) = this(Duration(timeout, TimeUnit.MILLISECONDS)) - def this(length: Long, unit: TimeUnit) = this(Duration(length, unit)) -} - -object Timeout { - /** - * A timeout with zero duration, will cause most requests to always timeout. - */ - val zero = new Timeout(Duration.Zero) - - /** - * A Timeout with infinite duration. Will never timeout. Use extreme caution with this - * as it may cause memory leaks, blocked threads, or may not even be supported by - * the receiver, which would result in an exception. - */ - val never = new Timeout(Duration.Inf) - - def apply(timeout: Long) = new Timeout(timeout) - def apply(length: Long, unit: TimeUnit) = new Timeout(length, unit) - - implicit def durationToTimeout(duration: Duration) = new Timeout(duration) - implicit def intToTimeout(timeout: Int) = new Timeout(timeout) - implicit def longToTimeout(timeout: Long) = new Timeout(timeout) - //implicit def defaultTimeout(implicit system: ActorSystem) = system.settings.ActorTimeout (have to introduce this in ActorSystem) -} diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index ba7dffbcb0..70221c0de1 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -13,7 +13,7 @@ import scala.concurrent.promise import scala.concurrent.blocking import scala.util.{ Try, Success, Failure } -import scala.util.Duration +import scala.concurrent.util.Duration trait TestBase { -- cgit v1.2.3 From d60099fb5543f92a81605873fea2d14637cbf4c6 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Mon, 12 Mar 2012 12:02:47 +0100 Subject: [vpm] test file for regression on old patmat it's a warning on new patmat -- TODO: dig deeper --- test/files/pos/virtpatmat_instof_valuetype.flags | 1 + test/files/pos/virtpatmat_instof_valuetype.scala | 8 ++++++++ 2 files changed, 9 insertions(+) create mode 100644 test/files/pos/virtpatmat_instof_valuetype.flags create mode 100644 test/files/pos/virtpatmat_instof_valuetype.scala (limited to 'test/files') diff --git a/test/files/pos/virtpatmat_instof_valuetype.flags b/test/files/pos/virtpatmat_instof_valuetype.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/pos/virtpatmat_instof_valuetype.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/pos/virtpatmat_instof_valuetype.scala b/test/files/pos/virtpatmat_instof_valuetype.scala new file mode 100644 index 0000000000..1dda9bf57c --- /dev/null +++ b/test/files/pos/virtpatmat_instof_valuetype.scala @@ -0,0 +1,8 @@ +case class Data(private val t: Option[String] = None, only: Boolean = false) { + def add(other: Data) = { + other match { + case Data(None, b) => () + case Data(Some(_), b) => () + } + } +} \ No newline at end of file -- cgit v1.2.3 From 972bf59a65d98286697ca8eed6a80239259808e4 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Tue, 28 Feb 2012 16:47:47 +0100 Subject: [vpm] TODO note: make unapply type list stricter when an unapply returns Option[T] where T is some ProductN, does that mean the unapply returns 1 result, i.e., that T, or did it mean to return N results? 
to disambiguate, falling back to stricter spec-adherence, which requires T be exactly TupleN for N results for now, allow extractor result to be any product, not just tuple --- src/compiler/scala/tools/nsc/typechecker/Unapplies.scala | 5 +++++ test/files/run/virtpatmat_extends_product.check | 1 + test/files/run/virtpatmat_extends_product.flags | 1 + test/files/run/virtpatmat_extends_product.scala | 11 +++++++++++ 4 files changed, 18 insertions(+) create mode 100644 test/files/run/virtpatmat_extends_product.check create mode 100644 test/files/run/virtpatmat_extends_product.flags create mode 100644 test/files/run/virtpatmat_extends_product.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala index cc272b7b8d..4f5b6868ae 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Unapplies.scala @@ -45,6 +45,11 @@ trait Unapplies extends ast.TreeDSL case BooleanClass => Nil case OptionClass | SomeClass => val prod = tp.typeArgs.head +// the spec doesn't allow just any subtype of Product, it *must* be TupleN[...] -- see run/virtpatmat_extends_product.scala +// this breaks plenty of stuff, though... +// val targs = +// if (isTupleType(prod)) getProductArgs(prod) +// else List(prod) val targs = getProductArgs(prod) if (targs.isEmpty || targs.tail.isEmpty) List(prod) // special n == 0 || n == 1 diff --git a/test/files/run/virtpatmat_extends_product.check b/test/files/run/virtpatmat_extends_product.check new file mode 100644 index 0000000000..c07e8385a7 --- /dev/null +++ b/test/files/run/virtpatmat_extends_product.check @@ -0,0 +1 @@ +AnnotationInfo(a,1) diff --git a/test/files/run/virtpatmat_extends_product.flags b/test/files/run/virtpatmat_extends_product.flags new file mode 100644 index 0000000000..ac6b805bd0 --- /dev/null +++ b/test/files/run/virtpatmat_extends_product.flags @@ -0,0 +1 @@ +-Yvirtpatmat diff --git a/test/files/run/virtpatmat_extends_product.scala b/test/files/run/virtpatmat_extends_product.scala new file mode 100644 index 0000000000..e564f4430b --- /dev/null +++ b/test/files/run/virtpatmat_extends_product.scala @@ -0,0 +1,11 @@ +object Test extends App { + case class AnnotationInfo(a: String, b: Int) extends Product2[String, Int] + + // if we're not careful in unapplyTypeListFromReturnType, the generated unapply is + // thought to return two components instead of one, since AnnotationInfo (the result of the unapply) is a Product2 + case class NestedAnnotArg(ai: AnnotationInfo) + + NestedAnnotArg(AnnotationInfo("a", 1)) match { + case NestedAnnotArg(x) => println(x) + } +} \ No newline at end of file -- cgit v1.2.3 From b046a6e3316df8b27ac31e71da1a139c800ccce7 Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Wed, 14 Mar 2012 11:47:59 +0100 Subject: [vpm] typer synths Function{} for empty-sel match typedMatchAnonFun is called from typedFunction when the function's body is a match this is work-in-progres: the compiler currently won't bootstrap under -Yvirtpatmat see also the pending test make sure to use the right context in typeFunction when the body is a Match when typer is set up for type checking a Function, the current owner is the symbol for the function, but we'll type check a Block(List(ClassDef(cd)), New(cd)) when the function is a match, and the function symbol is nowhere to be found, so go to outer context in patmatvirt: - simplified default case gen (no need for a Casegen instance) - using CASE | 
SYNTHETIC to detect generated matches (for switches) and avoid typing them endlessly more uniform, and necessary for new-style anon Function class instance gen for matches --- src/compiler/scala/reflect/internal/Trees.scala | 5 +- .../scala/tools/nsc/transform/UnCurry.scala | 86 +++---- .../tools/nsc/typechecker/PatMatVirtualiser.scala | 141 ++++++----- .../scala/tools/nsc/typechecker/Typers.scala | 210 +++++++++++++---- test/files/pos/virtpatmat_anonfun_for.flags | 1 + test/files/pos/virtpatmat_anonfun_for.scala | 8 + test/files/run/virtpatmat_partial.check | 17 +- test/files/run/virtpatmat_partial.scala | 257 ++++++++++++++------- .../run/virtpatmat_anonfun_underscore.check | 0 .../run/virtpatmat_anonfun_underscore.flags | 1 + .../run/virtpatmat_anonfun_underscore.scala | 4 + 11 files changed, 481 insertions(+), 249 deletions(-) create mode 100644 test/files/pos/virtpatmat_anonfun_for.flags create mode 100644 test/files/pos/virtpatmat_anonfun_for.scala create mode 100644 test/pending/run/virtpatmat_anonfun_underscore.check create mode 100644 test/pending/run/virtpatmat_anonfun_underscore.flags create mode 100644 test/pending/run/virtpatmat_anonfun_underscore.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index 9b1712b790..1a40e0105c 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -350,8 +350,9 @@ trait Trees extends api.Trees { self: SymbolTable => "subst[%s, %s](%s)".format(fromStr, toStr, (from, to).zipped map (_ + " -> " + _) mkString ", ") } - // NOTE: if symbols in `from` occur multiple times in the `tree` passed to `transform`, - // the resulting Tree will be a graph, not a tree... this breaks all sorts of stuff, + // NOTE: calls shallowDuplicate on trees in `to` to avoid problems when symbols in `from` + // occur multiple times in the `tree` passed to `transform`, + // otherwise, the resulting Tree would be a graph, not a tree... 
this breaks all sorts of stuff, // notably concerning the mutable aspects of Trees (such as setting their .tpe) class TreeSubstituter(from: List[Symbol], to: List[Tree]) extends Transformer { override def transform(tree: Tree): Tree = tree match { diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index ee565530b7..03bef83a90 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -237,8 +237,10 @@ abstract class UnCurry extends InfoTransform def targs = fun.tpe.typeArgs def isPartial = fun.tpe.typeSymbol == PartialFunctionClass + // if the function was eta-expanded, it's not a match without a selector if (fun1 ne fun) fun1 else { + assert(!(opt.virtPatmat && isPartial)) // empty-selector matches have already been translated into instantiations of anonymous (partial) functions val (formals, restpe) = (targs.init, targs.last) val anonClass = owner.newAnonymousFunctionClass(fun.pos, inConstructorFlag) def parents = @@ -286,52 +288,54 @@ abstract class UnCurry extends InfoTransform def defaultCase = CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) - val casesNoSynthCatchAll = dropSyntheticCatchAll(cases) +// val casesNoSynthCatchAll = dropSyntheticCatchAll(cases) gen.mkUncheckedMatch( - if (casesNoSynthCatchAll exists treeInfo.isDefaultCase) Literal(Constant(true)) - else substTree(wrap(Match(selector, (casesNoSynthCatchAll map transformCase) :+ defaultCase)).duplicate) + if (cases exists treeInfo.isDefaultCase) Literal(Constant(true)) + else substTree(wrap(Match(selector, (cases map transformCase) :+ defaultCase)).duplicate) ) } - override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = { - object noOne extends Transformer { - override val treeCopy = newStrictTreeCopier // must duplicate everything - val one = _match.tpe member newTermName("one") - override def transform(tree: Tree): Tree = tree match { - case Apply(fun, List(a)) if fun.symbol == one => - // blow one's argument away since all we want to know is whether the match succeeds or not - // (the alternative, making `one` CBN, would entail moving away from Option) - Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe))) - case _ => - super.transform(tree) - } - } - substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher)))) - } - - override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = { - object dropMatchResAssign extends Transformer { - // override val treeCopy = newStrictTreeCopier // will duplicate below - override def transform(tree: Tree): Tree = tree match { - // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing - case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol => - Block(List(assignKeepGoing), zero) - case _ => - super.transform(tree) - } - } - val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList - val idaBlock = wrap(Block( - zero :: - x :: - /* drop matchRes def */ - keepGoing :: - statsNoMatchRes, - NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) 
else matchRes` epilogue by `!keepGoing` - )) - substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed - } + override def caseVirtualizedMatch(orig: Tree, _match: Tree, targs: List[Tree], scrut: Tree, matcher: Tree): Tree = {assert(false); orig} + // { + // object noOne extends Transformer { + // override val treeCopy = newStrictTreeCopier // must duplicate everything + // val one = _match.tpe member newTermName("one") + // override def transform(tree: Tree): Tree = tree match { + // case Apply(fun, List(a)) if fun.symbol == one => + // // blow one's argument away since all we want to know is whether the match succeeds or not + // // (the alternative, making `one` CBN, would entail moving away from Option) + // Apply(fun.duplicate, List(gen.mkZeroContravariantAfterTyper(a.tpe))) + // case _ => + // super.transform(tree) + // } + // } + // substTree(Apply(Apply(TypeApply(Select(_match.duplicate, _match.tpe.member(newTermName("isSuccess"))), targs map (_.duplicate)), List(scrut.duplicate)), List(noOne.transform(matcher)))) + // } + + override def caseVirtualizedMatchOpt(orig: Tree, zero: ValDef, x: ValDef, matchRes: ValDef, keepGoing: ValDef, stats: List[Tree], epilogue: Tree, wrap: Tree => Tree) = {assert(false); orig} + // { + // object dropMatchResAssign extends Transformer { + // // override val treeCopy = newStrictTreeCopier // will duplicate below + // override def transform(tree: Tree): Tree = tree match { + // // don't compute the result of the match -- remove the block for the RHS (emitted by pmgen.one), except for the assignment to keepGoing + // case gen.VirtualCaseDef(assignKeepGoing, matchRes, zero) if assignKeepGoing.lhs.symbol eq keepGoing.symbol => + // Block(List(assignKeepGoing), zero) + // case _ => + // super.transform(tree) + // } + // } + // val statsNoMatchRes: List[Tree] = stats map (dropMatchResAssign.transform) toList + // val idaBlock = wrap(Block( + // zero :: + // x :: + // /* drop matchRes def */ + // keepGoing :: + // statsNoMatchRes, + // NOT(REF(keepGoing.symbol)) // replace `if (keepGoing) throw new MatchError(...) 
else matchRes` epilogue by `!keepGoing` + // )) + // substTree(idaBlock.duplicate) // duplicate on block as a whole to ensure valdefs are properly cloned and substed + // } } DefDef(m, isDefinedAtTransformer(fun.body)) diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 0422da54e0..34fefd20fe 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -49,7 +49,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => } object MatchTranslator { - def apply(typer: Typer): MatchTranslation = { + def apply(typer: Typer): MatchTranslation with CodegenCore = { import typer._ // typing `_match` to decide which MatchTranslator to create adds 4% to quick.comp.timer newTyper(context.makeImplicit(reportAmbiguousErrors = false)).silent(_.typed(Ident(vpmName._match), EXPRmode, WildcardType), reportAmbiguousErrors = false) match { @@ -116,10 +116,6 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => trait MatchTranslation extends MatchMonadInterface { self: TreeMakers with CodegenCore => import typer.{typed, context, silent, reallyExists} - private def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match { - case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args) - case _ => tp - } /** Implement a pattern match by turning its cases (including the implicit failure case) * into the corresponding (monadic) extractors, and combining them with the `orElse` combinator. @@ -131,18 +127,15 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => * thus, you must typecheck the result (and that will in turn translate nested matches) * this could probably optimized... 
(but note that the matchStrategy must be solved for each nested patternmatch) */ - def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type): Tree = { + def translateMatch(scrut: Tree, cases: List[CaseDef], pt: Type, scrutType: Type, matchFailGenOverride: Option[Tree => Tree] = None): Tree = { // we don't transform after typers // (that would require much more sophistication when generating trees, // and the only place that emits Matches after typers is for exception handling anyway) assert(phase.id <= currentRun.typerPhase.id, phase) - val scrutType = repeatedToSeq(elimAnonymousClass(scrut.tpe.widen)) - - val scrutSym = freshSym(scrut.pos, pureType(scrutType)) - val okPt = repeatedToSeq(pt) + val scrutSym = freshSym(scrut.pos, pureType(scrutType)) setFlag (Flags.CASE | SYNTHETIC) // the flags allow us to detect generated matches by looking at the scrutinee's symbol (needed to avoid recursing endlessly on generated switches) // pt = Any* occurs when compiling test/files/pos/annotDepMethType.scala with -Xexperimental - combineCases(scrut, scrutSym, cases map translateCase(scrutSym, okPt), okPt, matchOwner) + combineCases(scrut, scrutSym, cases map translateCase(scrutSym, pt), pt, matchOwner, matchFailGenOverride) } // return list of typed CaseDefs that are supported by the backend (typed/bind/wildcard) @@ -154,13 +147,12 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => // if they're already simple enough to be handled by the back-end, we're done if (caseDefs forall treeInfo.isCatchCase) caseDefs else { - val okPt = repeatedToSeq(pt) val switch = { val bindersAndCases = caseDefs map { caseDef => // generate a fresh symbol for each case, hoping we'll end up emitting a type-switch (we don't have a global scrut there) // if we fail to emit a fine-grained switch, have to do translateCase again with a single scrutSym (TODO: uniformize substitution on treemakers so we can avoid this) val caseScrutSym = freshSym(pos, pureType(ThrowableClass.tpe)) - (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, okPt)(caseDef), EmptySubstitution)) + (caseScrutSym, propagateSubstitution(translateCase(caseScrutSym, pt)(caseDef), EmptySubstitution)) } (emitTypeSwitch(bindersAndCases, pt) map (_.map(fixerUpper(matchOwner, pos).apply(_).asInstanceOf[CaseDef]))) @@ -168,7 +160,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => val catches = switch getOrElse { val scrutSym = freshSym(pos, pureType(ThrowableClass.tpe)) - val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, okPt)(caseDef), EmptySubstitution))} + val casesNoSubstOnly = caseDefs map { caseDef => (propagateSubstitution(translateCase(scrutSym, pt)(caseDef), EmptySubstitution))} val exSym = freshSym(pos, pureType(ThrowableClass.tpe), "ex") @@ -177,7 +169,7 @@ trait PatMatVirtualiser extends ast.TreeDSL { self: Analyzer => CaseDef( Bind(exSym, Ident(nme.WILDCARD)), // TODO: does this need fixing upping? 
EmptyTree, - combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, casegen => scrut => Throw(CODE.REF(exSym))) + combineCasesNoSubstOnly(CODE.REF(exSym), scrutSym, casesNoSubstOnly, pt, matchOwner, Some(scrut => Throw(CODE.REF(exSym)))) ) }) } @@ -706,10 +698,10 @@ class Foo(x: Other) { x._1 } // no error in this order def optimizeCases(prevBinder: Symbol, cases: List[List[TreeMaker]], pt: Type): (List[List[TreeMaker]], List[Tree]) = (cases, Nil) - def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = + def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = None - // for catch + // for catch (no need to customize match failure) def emitTypeSwitch(bindersAndCases: List[(Symbol, List[TreeMaker])], pt: Type): Option[List[CaseDef]] = None @@ -925,7 +917,7 @@ class Foo(x: Other) { x._1 } // no error in this order ((casegen: Casegen) => combineExtractors(altTreeMakers :+ TrivialTreeMaker(casegen.one(TRUE)))(casegen)) ) - val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some((casegen: Casegen) => x => casegen.one(FALSE))) + val findAltMatcher = codegenAlt.matcher(EmptyTree, NoSymbol, BooleanClass.tpe)(combinedAlts, Some(x => FALSE)) codegenAlt.ifThenElseZero(findAltMatcher, substitution(next)) } } @@ -936,6 +928,12 @@ class Foo(x: Other) { x._1 } // no error in this order override def toString = "G("+ guardTree +")" } + // combineExtractors changes the current substitution's of the tree makers in `treeMakers` + // requires propagateSubstitution(treeMakers) has been called + def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree = + treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen)) + + def removeSubstOnly(makers: List[TreeMaker]) = makers filterNot (_.isInstanceOf[SubstOnlyTreeMaker]) // a foldLeft to accumulate the localSubstitution left-to-right @@ -950,42 +948,42 @@ class Foo(x: Other) { x._1 } // no error in this order } // calls propagateSubstitution on the treemakers - def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol): Tree = { - val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them - combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, casegen => CODE.MATCHERROR(_)) + def combineCases(scrut: Tree, scrutSym: Symbol, casesRaw: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = { + // drops SubstOnlyTreeMakers, since their effect is now contained in the TreeMakers that follow them + val casesNoSubstOnly = casesRaw map (propagateSubstitution(_, EmptySubstitution)) + combineCasesNoSubstOnly(scrut, scrutSym, casesNoSubstOnly, pt, owner, matchFailGenOverride) } - def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFail: Casegen => Tree => Tree): Tree = fixerUpper(owner, scrut.pos){ - val ptDefined = if (isFullyDefined(pt)) pt else NoType - - emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt).getOrElse{ - if (casesNoSubstOnly nonEmpty) { - // check casesNoSubstOnly for presence of a default case, since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one - // exhaustivity and reachability must be 
checked before optimization as well - // TODO: improve, a trivial type test before the body still makes for a default case - // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op) - val catchAll = - if (casesNoSubstOnly.nonEmpty && { - val nonTrivLast = casesNoSubstOnly.last - nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] - }) None - else Some(matchFail) - - val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt) - - val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, catchAll) - - if (toHoist isEmpty) matchRes else Block(toHoist, matchRes) - } else { - codegen.matcher(scrut, scrutSym, pt)(Nil, Some(matchFail)) + def combineCasesNoSubstOnly(scrut: Tree, scrutSym: Symbol, casesNoSubstOnly: List[List[TreeMaker]], pt: Type, owner: Symbol, matchFailGenOverride: Option[Tree => Tree]): Tree = + fixerUpper(owner, scrut.pos){ + val ptDefined = if (isFullyDefined(pt)) pt else NoType + def matchFailGen = (matchFailGenOverride orElse Some(CODE.MATCHERROR(_: Tree))) + + emitSwitch(scrut, scrutSym, casesNoSubstOnly, pt, matchFailGenOverride).getOrElse{ + if (casesNoSubstOnly nonEmpty) { + // before optimizing, check casesNoSubstOnly for presence of a default case, + // since DCE will eliminate trivial cases like `case _ =>`, even if they're the last one + // exhaustivity and reachability must be checked before optimization as well + // TODO: improve notion of trivial/irrefutable -- a trivial type test before the body still makes for a default case + // ("trivial" depends on whether we're emitting a straight match or an exception, or more generally, any supertype of scrutSym.tpe is a no-op) + // irrefutability checking should use the approximation framework also used for CSE, unreachability and exhaustivity checking + val synthCatchAll = + if (casesNoSubstOnly.nonEmpty && { + val nonTrivLast = casesNoSubstOnly.last + nonTrivLast.nonEmpty && nonTrivLast.head.isInstanceOf[BodyTreeMaker] + }) None + else matchFailGen + + val (cases, toHoist) = optimizeCases(scrutSym, casesNoSubstOnly, pt) + + val matchRes = codegen.matcher(scrut, scrutSym, pt)(cases map combineExtractors, synthCatchAll) + + if (toHoist isEmpty) matchRes else Block(toHoist, matchRes) + } else { + codegen.matcher(scrut, scrutSym, pt)(Nil, matchFailGen) + } } } - } - - // combineExtractors changes the current substitution's of the tree makers in `treeMakers` - // requires propagateSubstitution(treeMakers) has been called - def combineExtractors(treeMakers: List[TreeMaker])(casegen: Casegen): Tree = - treeMakers.foldRight(EmptyTree: Tree)((a, b) => a.chainBefore(b)(casegen)) // TODO: do this during tree construction, but that will require tracking the current owner in treemakers // TODO: assign more fine-grained positions @@ -1043,7 +1041,7 @@ class Foo(x: Other) { x._1 } // no error in this order // codegen relevant to the structure of the translation (how extractors are combined) trait AbsCodegen { - def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], catchAllGen: Option[Casegen => Tree => Tree]): Tree + def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree // local / context-free def _asInstanceOf(b: Symbol, tp: Type): Tree @@ -1136,13 +1134,13 @@ class Foo(x: Other) { x._1 } // no error in this order //// methods in MatchingStrategy (the monad companion) -- used directly in 
translation // __match.runOrElse(`scrut`)(`scrutSym` => `matcher`) // TODO: consider catchAll, or virtualized matching will break in exception handlers - def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], catchAllGen: Option[Casegen => Tree => Tree]): Tree = + def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = _match(vpmName.runOrElse) APPLY (scrut) APPLY (fun(scrutSym, cases map (f => f(this)) reduceLeft typedOrElse)) // __match.one(`res`) def one(res: Tree): Tree = (_match(vpmName.one)) (res) // __match.zero - def zero: Tree = _match(vpmName.zero) + protected def zero: Tree = _match(vpmName.zero) // __match.guard(`c`, `then`) def guard(c: Tree, then: Tree): Tree = _match(vpmName.guard) APPLY (c, then) @@ -1517,7 +1515,7 @@ class Foo(x: Other) { x._1 } // no error in this order } } - class RegularSwitchMaker(scrutSym: Symbol) extends SwitchMaker { + class RegularSwitchMaker(scrutSym: Symbol, matchFailGenOverride: Option[Tree => Tree]) extends SwitchMaker { val switchableTpe = Set(ByteClass.tpe, ShortClass.tpe, IntClass.tpe, CharClass.tpe) val alternativesSupported = true @@ -1540,14 +1538,14 @@ class Foo(x: Other) { x._1 } // no error in this order } def defaultSym: Symbol = scrutSym - def defaultBody: Tree = { import CODE._; MATCHERROR(REF(scrutSym)) } + def defaultBody: Tree = { import CODE._; matchFailGenOverride map (gen => gen(REF(scrutSym))) getOrElse MATCHERROR(REF(scrutSym)) } def defaultCase(scrutSym: Symbol = defaultSym, body: Tree = defaultBody): CaseDef = { import CODE._; atPos(body.pos) { DEFAULT ==> body }} } - override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type): Option[Tree] = { import CODE._ - val regularSwitchMaker = new RegularSwitchMaker(scrutSym) + override def emitSwitch(scrut: Tree, scrutSym: Symbol, cases: List[List[TreeMaker]], pt: Type, matchFailGenOverride: Option[Tree => Tree]): Option[Tree] = { import CODE._ + val regularSwitchMaker = new RegularSwitchMaker(scrutSym, matchFailGenOverride) // TODO: if patterns allow switch but the type of the scrutinee doesn't, cast (type-test) the scrutinee to the corresponding switchable type and switch on the result if (regularSwitchMaker.switchableTpe(scrutSym.tpe)) { val caseDefsWithDefault = regularSwitchMaker(cases map {c => (scrutSym, c)}, pt) @@ -1555,7 +1553,7 @@ class Foo(x: Other) { x._1 } // no error in this order else { // match on scrutSym -- converted to an int if necessary -- not on scrut directly (to avoid duplicating scrut) val scrutToInt: Tree = - if(scrutSym.tpe =:= IntClass.tpe) REF(scrutSym) + if (scrutSym.tpe =:= IntClass.tpe) REF(scrutSym) else (REF(scrutSym) DOT (nme.toInt)) Some(BLOCK( VAL(scrutSym) === scrut, @@ -1631,7 +1629,7 @@ class Foo(x: Other) { x._1 } // no error in this order * the matcher's optional result is encoded as a flag, keepGoing, where keepGoing == true encodes result.isEmpty, * if keepGoing is false, the result Some(x) of the naive translation is encoded as matchRes == x */ - def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], catchAllGen: Option[Casegen => Tree => Tree]): Tree = { + def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = { val matchEnd = NoSymbol.newLabel(freshName("matchEnd"), NoPosition) setFlag (SYNTHETIC | Flags.CASE) val matchRes = NoSymbol.newValueParameter(newTermName("x"), NoPosition, 
SYNTHETIC) setInfo restpe matchEnd setInfo MethodType(List(matchRes), restpe) @@ -1645,38 +1643,35 @@ class Foo(x: Other) { x._1 } // no error in this order LabelDef(currCase, Nil, mkCase(casegen)) } - def catchAll = catchAllGen map { catchAllGen => - val casegen = new OptimizedCasegen(matchEnd, NoSymbol) - val scrutRef = if(scrutSym eq NoSymbol) EmptyTree else REF(scrutSym) - LabelDef(nextCase, Nil, catchAllGen(casegen)(scrutRef)) + def catchAll = matchFailGen map { matchFailGen => + val scrutRef = if(scrutSym ne NoSymbol) REF(scrutSym) else EmptyTree // for alternatives + LabelDef(nextCase, Nil, matchEnd APPLY (matchFailGen(scrutRef))) // need to jump to matchEnd with result generated by matchFailGen (could be `FALSE` for isDefinedAt) } toList + // catchAll.isEmpty iff no synthetic default case needed (the (last) user-defined case is a default) + // if the last user-defined case is a default, it will never jump to the next case; it will go immediately to matchEnd // the generated block is taken apart in TailCalls under the following assumptions // the assumption is once we encounter a case, the remainder of the block will consist of cases // the prologue may be empty, usually it is the valdef that stores the scrut // val (prologue, cases) = stats span (s => !s.isInstanceOf[LabelDef]) - val prologue = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil + // scrutSym == NoSymbol when generating an alternatives matcher + val scrutDef = if(scrutSym ne NoSymbol) List(VAL(scrutSym) === scrut) else Nil // for alternatives Block( - prologue ++ (cases map caseDef) ++ catchAll, + scrutDef ++ (cases map caseDef) ++ catchAll, LabelDef(matchEnd, List(matchRes), REF(matchRes)) ) } class OptimizedCasegen(matchEnd: Symbol, nextCase: Symbol) extends CommonCodegen with Casegen { - def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], catchAllGen: Option[Casegen => Tree => Tree]): Tree = - optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, catchAllGen) - - def zero: Tree = nextCase APPLY () + def matcher(scrut: Tree, scrutSym: Symbol, restpe: Type)(cases: List[Casegen => Tree], matchFailGen: Option[Tree => Tree]): Tree = + optimizedCodegen.matcher(scrut, scrutSym, restpe)(cases, matchFailGen) // only used to wrap the RHS of a body // res: T // returns MatchMonad[T] def one(res: Tree): Tree = matchEnd APPLY (res) - - - override def ifThenElseZero(c: Tree, then: Tree): Tree = - IF (c) THEN then ELSE zero + protected def zero: Tree = nextCase APPLY () // prev: MatchMonad[T] // b: T diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 0dd4b37131..506e347828 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2143,6 +2143,137 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt) } + def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe)) + + def translateMatch(selector: Tree, cases: List[CaseDef], mode: Int, resTp: Type, scrutTp: Type = NoType, matchFailGen: Option[Tree => Tree] = None) = { + val selector1 = if(scrutTp eq NoType) checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) else selector + val selectorTp = if(scrutTp eq NoType) packCaptured(selector1.tpe.widen) else scrutTp + val casesTyped = typedCases(cases, selectorTp, resTp) + val (ownType, 
needAdapt) = if (isFullyDefined(resTp)) (resTp, false) else weakLub(casesTyped map (_.tpe.deconst)) + val casesAdapted = if (!needAdapt) casesTyped else casesTyped map (adaptCase(_, mode, ownType)) + // val (owntype0, needAdapt) = ptOrLub(casesTyped map (x => repackExistential(x.tpe))) + // val owntype = elimAnonymousClass(owntype0) + + def repeatedToSeq(tp: Type): Type = (tp baseType RepeatedParamClass) match { + case TypeRef(_, RepeatedParamClass, args) => appliedType(SeqClass.typeConstructor, args) + case _ => tp + } + + def isSynthSelector(selector: Tree): Boolean = selector match { + case Ident(_) if selector.symbol.hasFlag(SYNTHETIC | CASE) => true + case Select(sel, nme.toInt) => isSynthSelector(sel) // switch may need to convert to int first + case _ => false + } + + if (isSynthSelector(selector1)) { // a switch + (Match(selector1, casesAdapted) setType ownType, ownType) // setType of the Match to avoid recursing endlessly + } else { + val scrutType = repeatedToSeq(elimAnonymousClass(selectorTp)) + (MatchTranslator(this).translateMatch(selector1, casesAdapted, repeatedToSeq(ownType), scrutType, matchFailGen), ownType) + } + } + + // TODO: use this to synthesize (partial)function implementation for matches from the get-go, + // instead of the dirty post-factum hacks in uncurry -- typedMatchAnonFun is currently not used due to mindboggling failures (see virtpatmat_anonfun_for.scala) + def typedMatchAnonFun(tree: Tree, cases: List[CaseDef], mode: Int, pt0: Type, selOverride: Option[(List[Symbol], Tree)] = None) = { + val pt = deskolemizeGADTSkolems(pt0) + val targs = pt.normalize.typeArgs + val arity = if (isFunctionType(pt)) targs.length - 1 else 1 + val scrutTp0 = if (arity == 1) targs.head else /* arity > 1 */ tupleType(targs.init) + val scrutTp = packCaptured(scrutTp0) + val ptRes = targs.last // may not be fully defined + val isPartial = pt.typeSymbol == PartialFunctionClass + val cname = tpnme.ANON_FUN_NAME + val funThis = This(cname) + // used to create a new context for pattern matching translation so that + // we can easily rejig the owner structure when we have the actual symbols for these methods + // (after type checking them, but type checking requires translation -- this seems like the easiest way to end this vicious cycle) + val applySentinel = NoSymbol.newMethod(nme.apply) + val idaSentinel = NoSymbol.newMethod(nme._isDefinedAt) + + def mkParams = { + val params = + for (i <- List.range(0, arity)) yield atPos(tree.pos.focusStart) { + ValDef(Modifiers(SYNTHETIC | PARAM), unit.freshTermName("x" + i + "$"), TypeTree(), EmptyTree) + } + val ids = params map (p => Ident(p.name)) + + val paramsRef = selOverride match { + case None => atPos(tree.pos.focusStart) { if (arity == 1) ids.head else gen.mkTuple(ids) } + case Some((_, sel)) => sel.duplicate // we'll replace the symbols that refer to the function's original syms by the ones introduced by the DefDef once the method's been type checked (until then, we don't know them) + } + + (params, paramsRef) // paramsRef can't be typed until after match has been translated, thus supply explicit scrutTp to translate below + } + + import CODE._ + + // need to duplicate the cases before typing them to generate the apply method, or the symbols will be all messed up + val casesTrue = if (isPartial) cases map (c => deriveCaseDef(c)(x => TRUE).duplicate) else Nil + + val (applyMethod, parents) = { + val (params, paramsRef) = mkParams + val (body, resTp) = newTyper(context.make(context.tree, applySentinel)).translateMatch(paramsRef, cases, 
mode, ptRes, scrutTp, if (isPartial) Some(scrut => (funThis DOT nme.missingCase) (scrut)) else None) + + def abstractFunctionType = { + val sym = AbstractFunctionClass(arity) + typeRef(sym.typeConstructor.prefix, sym, targs.init :+ resTp) + } + + val parents = + if (isFunctionType(pt)) List(abstractFunctionType, SerializableClass.tpe) + else if (isPartial) List(appliedType(AbstractPartialFunctionClass.typeConstructor, List(scrutTp, resTp)), SerializableClass.tpe) + else List(ObjectClass.tpe, pt, SerializableClass.tpe) + + (atPos(tree.pos.focus)(DefDef(Modifiers(FINAL), nme.apply, Nil, List(params), TypeTree() setType resTp, body)), parents) + } + + def isDefinedAtMethod = { + val (params, paramsRef) = mkParams + val (body, _) = newTyper(context.make(context.tree, idaSentinel)).translateMatch(paramsRef, casesTrue, mode, BooleanClass.tpe, scrutTp, Some(scrutinee => FALSE)) + atPos(tree.pos.focus)( + DefDef(Modifiers(FINAL), nme._isDefinedAt, Nil, List(params), TypeTree() setType BooleanClass.tpe, body) + ) + } + + val members = if (!isPartial) List(applyMethod) else List(applyMethod, isDefinedAtMethod) + + val cmods = Modifiers(FINAL | SYNTHETIC /*TODO: when do we need INCONSTRUCTOR ?*/) withAnnotations ( + List(NEW(SerialVersionUIDAttr, LIT(0)))) + val cdef = + ClassDef(cmods, cname, Nil, + Template(parents map (TypeTree() setType _), emptyValDef, Modifiers(0), Nil, List(Nil), members, tree.pos) + ) + val funInst = (Block(List(cdef), Apply(Select(New(Ident(cname)), nme.CONSTRUCTOR), Nil))) + + val res = typed(funInst, mode, pt) + + // now that we have the symbols corresponding to the apply/isDefinedAt methods, + // we can fix up the result of fixerUpper... URGH + // fixerUpper nests the top-level definitions generated in the match under context.owner, but they should be owner by the apply/isDefinedAt method + res foreach { + case d: DefDef if (d.symbol.name == nme.apply) => + d.rhs.changeOwner(applySentinel -> d.symbol) + case d: DefDef if (d.symbol.name == nme._isDefinedAt) => + d.rhs.changeOwner(idaSentinel -> d.symbol) + case _ => + } + + selOverride match { + case None => res + case Some((paramSyms, sel)) => + object substParamSyms extends Transformer { + override def transform(t: Tree): Tree = t match { + case d: DefDef if (d.symbol.name == nme.apply) || (d.symbol.name == nme._isDefinedAt) && (d.symbol.owner == res.tpe.typeSymbol) => + deriveDefDef(d)(rhs => rhs.substTreeSyms(paramSyms, d.vparamss.head.map(_.symbol))) + case _ => + super.transform(t) + } + } + substParamSyms.transform(res) + } + } + /** * @param fun ... * @param mode ... 
@@ -2155,14 +2286,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { return MaxFunctionArityError(fun) def decompose(pt: Type): (Symbol, List[Type], Type) = - if ((isFunctionType(pt) - || - pt.typeSymbol == PartialFunctionClass && - numVparams == 1 && fun.body.isInstanceOf[Match]) - && // see bug901 for a reason why next conditions are needed - (pt.normalize.typeArgs.length - 1 == numVparams - || - fun.vparams.exists(_.tpt.isEmpty))) + if ((isFunctionType(pt) || (pt.typeSymbol == PartialFunctionClass && numVparams == 1 && fun.body.isInstanceOf[Match])) && // see bug901 for a reason why next conditions are needed + ( pt.normalize.typeArgs.length - 1 == numVparams + || fun.vparams.exists(_.tpt.isEmpty) + )) (pt.typeSymbol, pt.normalize.typeArgs.init, pt.normalize.typeArgs.last) else (FunctionClass(numVparams), fun.vparams map (x => NoType), WildcardType) @@ -2204,13 +2331,27 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { // for (vparam <- vparams) { // checkNoEscaping.locals(context.scope, WildcardType, vparam.tpt); () // } - val body1 = typed(fun.body, respt) - val formals = vparamSyms map (_.tpe) - val restpe = packedType(body1, fun.symbol).deconst.resultType - val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe) -// body = checkNoEscaping.locals(context.scope, restpe, body) - treeCopy.Function(fun, vparams, body1).setType(funtpe) - } + + def recompose(from: Type, to: Type) = + if(clazz == PartialFunctionClass) appliedType(PartialFunctionClass.typeConstructor, List(from, to)) + else functionType(List(from), to) + + fun.body match { + case Match(sel, cases) if opt.virtPatmat => + val typedSel = typed(sel, EXPRmode | BYVALmode, WildcardType) + // go to outer context -- must discard the context that was created for the Function since we're discarding the function + // thus, its symbol, which serves as the current context.owner, is not the right owner + // you won't know you're using the wrong owner until lambda lift crashes (unless you know better than to use the wrong owner) + newTyper(context.outer).typedMatchAnonFun(fun, cases, mode, recompose(typedSel.tpe, respt), Some((vparamSyms, typedSel))) + case _ => + val body1 = typed(fun.body, respt) + val formals = vparamSyms map (_.tpe) + val restpe = packedType(body1, fun.symbol).deconst.resultType + val funtpe = typeRef(clazz.tpe.prefix, clazz, formals :+ restpe) + // body = checkNoEscaping.locals(context.scope, restpe, body) + treeCopy.Function(fun, vparams, body1).setType(funtpe) + } + } } def typedRefinement(stats: List[Tree]) { @@ -3431,7 +3572,10 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } def typedMatch(tree: Tree, selector: Tree, cases: List[CaseDef]): Tree = { - if (selector == EmptyTree) { + if (opt.virtPatmat && !isPastTyper) { + if (selector ne EmptyTree) typed(translateMatch(selector, cases, mode, pt)._1, mode, pt) + else typedMatchAnonFun(tree, cases, mode, pt) + } else if (selector == EmptyTree) { val arity = if (isFunctionType(pt)) pt.normalize.typeArgs.length - 1 else 1 val params = for (i <- List.range(0, arity)) yield atPos(tree.pos.focusStart) { @@ -3445,32 +3589,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } else { val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt) - - if (isPastTyper || !opt.virtPatmat) { - val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe)) - if (needAdapt) { - cases1 = cases1 map 
(adaptCase(_, owntype)) - } - treeCopy.Match(tree, selector1, cases1) setType owntype - } else { // don't run translator after typers (see comments in PatMatVirtualiser) - val (owntype0, needAdapt) = ptOrLub(cases1 map (x => repackExistential(x.tpe))) - val owntype = elimAnonymousClass(owntype0) - if (needAdapt) cases1 = cases1 map (adaptCase(_, owntype)) - - (MatchTranslator(this)).translateMatch(selector1, cases1, owntype) match { - case Block(vd :: Nil, tree@Match(selector, cases)) => - val selector1 = checkDead(typed(selector, EXPRmode | BYVALmode, WildcardType)) - var cases1 = typedCases(cases, packCaptured(selector1.tpe.widen), pt) - val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe)) - if (needAdapt) - cases1 = cases1 map (adaptCase(_, owntype)) - typed(Block(vd :: Nil, treeCopy.Match(tree, selector1, cases1) setType owntype)) - case translated => - // TODO: get rid of setType owntype -- it should all typecheck - // must call typed, not typed1, or we overflow the stack when emitting switches - typed(translated, mode, WildcardType) setType owntype - } + val (owntype, needAdapt) = ptOrLub(cases1 map (_.tpe)) + if (needAdapt) { + cases1 = cases1 map (adaptCase(_, mode, owntype)) } + treeCopy.Match(tree, selector1, cases1) setType owntype } } @@ -4240,9 +4363,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { } } - def adaptCase(cdef: CaseDef, tpe: Type): CaseDef = - deriveCaseDef(cdef)(adapt(_, mode, tpe)) - // begin typed1 val sym: Symbol = tree.symbol if ((sym ne null) && (sym ne NoSymbol)) sym.initialize @@ -4351,7 +4471,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val (owntype, needAdapt) = ptOrLub(block1.tpe :: (catches1 map (_.tpe))) if (needAdapt) { block1 = adapt(block1, mode, owntype) - catches1 = catches1 map (adaptCase(_, owntype)) + catches1 = catches1 map (adaptCase(_, mode, owntype)) } if(!isPastTyper && opt.virtPatmat) { diff --git a/test/files/pos/virtpatmat_anonfun_for.flags b/test/files/pos/virtpatmat_anonfun_for.flags new file mode 100644 index 0000000000..23e3dc7d26 --- /dev/null +++ b/test/files/pos/virtpatmat_anonfun_for.flags @@ -0,0 +1 @@ +-Yvirtpatmat \ No newline at end of file diff --git a/test/files/pos/virtpatmat_anonfun_for.scala b/test/files/pos/virtpatmat_anonfun_for.scala new file mode 100644 index 0000000000..8623cd97ba --- /dev/null +++ b/test/files/pos/virtpatmat_anonfun_for.scala @@ -0,0 +1,8 @@ +trait Foo { + def bla = { + val tvs = "tvs" + Nil.foreach(x => x match { + case _ => println(tvs) + }) + } +} \ No newline at end of file diff --git a/test/files/run/virtpatmat_partial.check b/test/files/run/virtpatmat_partial.check index 1555eca82b..137d16da79 100644 --- a/test/files/run/virtpatmat_partial.check +++ b/test/files/run/virtpatmat_partial.check @@ -1,4 +1,17 @@ Map(a -> Some(1), b -> None) -79 -undefined Map(a -> 1) +a +undefined +a +undefined +a +undefined +a +undefined +hai! +hai! +2 +hai! 
+undefined +1 +undefined diff --git a/test/files/run/virtpatmat_partial.scala b/test/files/run/virtpatmat_partial.scala index 6597f2f5ae..a235314610 100644 --- a/test/files/run/virtpatmat_partial.scala +++ b/test/files/run/virtpatmat_partial.scala @@ -2,95 +2,180 @@ object Test extends App { val a = Map("a" -> Some(1), "b" -> None) println(a) +// inferred type should be Map[String, Int] val res = a collect {case (p, Some(a)) => (p, a)} - final val GT = 79 - final val GTGT = 93 - final val GTGTGT = 94 - final val GTEQ = 81 - final val GTGTEQ = 113 - final val GTGTGTEQ = 114 - final val ASSIGN = 75 - - def acceptClosingAngle(in: Int) { - val closers: PartialFunction[Int, Int] = { - case GTGTGTEQ => GTGTEQ - case GTGTGT => GTGT - case GTGTEQ => GTEQ - case GTGT => GT - case GTEQ => ASSIGN +// variations: const target -> switch, non-const -> normal match, char target --> scrut needs toInt, +// eta-expanded --> work is done by typedFunction, non-eta-expanded --> typedMatch + + object nonConstCharEta { + final val GT : Char = 'a' + final val GTGT : Char = 'b' + final val GTGTGT : Char = 'c' + final val GTEQ : Char = 'd' + final val GTGTEQ : Char = 'e' + final val GTGTGTEQ: Char = 'f' + final val ASSIGN : Char = 'g' + + def acceptClosingAngle(in: Char) { + val closers: PartialFunction[Char, Char] = { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) + } + } + + object nonConstChar { + final val GT : Char = 'a' + final val GTGT : Char = 'b' + final val GTGTGT : Char = 'c' + final val GTEQ : Char = 'd' + final val GTGTEQ : Char = 'e' + final val GTGTGTEQ: Char = 'f' + final val ASSIGN : Char = 'g' + + def acceptClosingAngle(in: Char) { + val closers: PartialFunction[Char, Char] = x => x match { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) } - if (closers isDefinedAt in) println(closers(in)) - else println("undefined") } - acceptClosingAngle(GTGT) - acceptClosingAngle(ASSIGN) - - // should uncurry to: - // val res: Map[String,Int] = a.collect[(String, Int), Map[String,Int]]( - // new PartialFunction[(String, Option[Int]),(String, Int)] { - // def apply(x0_1: (String, Option[Int])): (String, Int) = MatchingStrategy.OptionMatchingStrategy.runOrElse[(String, Option[Int]), (String, Int)](x0_1)( - // (x1: (String, Option[Int])) => { - // val o9: Option[(String, Int)] = ({ - // val o8: Option[(String, Option[Int])] = Tuple2.unapply[String, Option[Int]](x1); - // if (o8.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // { - // val o7: Option[Some[Int]] = if (o8.get._2.isInstanceOf[Some[Int]]) - // MatchingStrategy.OptionMatchingStrategy.one[Some[Int]](o8.get._2.asInstanceOf[Some[Int]]) - // else - // MatchingStrategy.OptionMatchingStrategy.zero; - // if (o7.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // { - // val o6: Option[Int] = Some.unapply[Int](o7.get); - // if (o6.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // MatchingStrategy.OptionMatchingStrategy.one[(String, Int)]((o8.get._1, o6.get).asInstanceOf[(String, Int)]) - // } - // } - // }: Option[(String, Int)]); - // if (o9.isEmpty) - 
// (MatchingStrategy.OptionMatchingStrategy.zero: Option[(String, Int)]) - // else - // o9 - // }) - // - // def isDefinedAt(x_1: (String, Option[Int])): Boolean = MatchingStrategy.OptionMatchingStrategy.isSuccess[(String, Option[Int]), (String, Int)](x_1)( - // (x1: (String, Option[Int])) => { - // val o9: Option[(String, Int)] = ({ - // val o8: Option[(String, Option[Int])] = scala.Tuple2.unapply[String, Option[Int]](x1); - // if (o8.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // { - // val o7: Option[Some[Int]] = if (o8.get._2.isInstanceOf[Some[Int]]) - // MatchingStrategy.OptionMatchingStrategy.one[Some[Int]](o8.get._2.asInstanceOf[Some[Int]]) // XXX - // else - // MatchingStrategy.OptionMatchingStrategy.zero; - // if (o7.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // { - // val o6: Option[Int] = scala.Some.unapply[Int](o7.get); - // if (o6.isEmpty) - // MatchingStrategy.OptionMatchingStrategy.zero - // else - // MatchingStrategy.OptionMatchingStrategy.one[(String, Int)](null.asInstanceOf[(String, Int)]) - // } - // } - // }: Option[(String, Int)]); - // if (o9.isEmpty) - // (MatchingStrategy.OptionMatchingStrategy.zero: Option[(String, Int)]) - // else - // o9 - // }) - // } - // ) - - println(res) + object constCharEta { + final val GT = 'a' + final val GTGT = 'b' + final val GTGTGT = 'c' + final val GTEQ = 'd' + final val GTGTEQ = 'e' + final val GTGTGTEQ= 'f' + final val ASSIGN = 'g' + + def acceptClosingAngle(in: Char) { + val closers: PartialFunction[Char, Char] = x => x match { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) + } + } + + object constChar { + final val GT = 'a' + final val GTGT = 'b' + final val GTGTGT = 'c' + final val GTEQ = 'd' + final val GTGTEQ = 'e' + final val GTGTGTEQ= 'f' + final val ASSIGN = 'g' + + def acceptClosingAngle(in: Char) { + val closers: PartialFunction[Char, Char] = { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) + } + } + + object constIntEta { + final val GT = 1 + final val GTGT = 2 + final val GTGTGT = 3 + final val GTEQ = 4 + final val GTGTEQ = 5 + final val GTGTGTEQ = 6 + final val ASSIGN = 7 + + def acceptClosingAngle(in: Int) { + val closers: PartialFunction[Int, Int] = x => {println("hai!"); (x + 1)} match { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) + } + } + + object constInt { + final val GT = 1 + final val GTGT = 2 + final val GTGTGT = 3 + final val GTEQ = 4 + final val GTGTEQ = 5 + final val GTGTGTEQ = 6 + final val ASSIGN = 7 + + def acceptClosingAngle(in: Int) { + val closers: PartialFunction[Int, Int] = { + case GTGTGTEQ => GTGTEQ + case GTGTGT => GTGT + case GTGTEQ => GTEQ + case GTGT => GT + case GTEQ => ASSIGN + } + if (closers isDefinedAt in) println(closers(in)) + else println("undefined") + } + + def test() = { + acceptClosingAngle(GTGT) + acceptClosingAngle(ASSIGN) + } + } + + println(res) // prints 
"Map(a -> 1)" + + nonConstCharEta.test() + nonConstChar.test() + constCharEta.test() + constChar.test() + constIntEta.test() + constInt.test() } diff --git a/test/pending/run/virtpatmat_anonfun_underscore.check b/test/pending/run/virtpatmat_anonfun_underscore.check new file mode 100644 index 0000000000..e69de29bb2 diff --git a/test/pending/run/virtpatmat_anonfun_underscore.flags b/test/pending/run/virtpatmat_anonfun_underscore.flags new file mode 100644 index 0000000000..23e3dc7d26 --- /dev/null +++ b/test/pending/run/virtpatmat_anonfun_underscore.flags @@ -0,0 +1 @@ +-Yvirtpatmat \ No newline at end of file diff --git a/test/pending/run/virtpatmat_anonfun_underscore.scala b/test/pending/run/virtpatmat_anonfun_underscore.scala new file mode 100644 index 0000000000..db6705d025 --- /dev/null +++ b/test/pending/run/virtpatmat_anonfun_underscore.scala @@ -0,0 +1,4 @@ +object Test extends App { + List(1,2,3) map (_ match { case x => x + 1} ) // `_ match` is redundant but shouldn't crash the compiler + List((1,2)) map (_ match { case (x, z) => x + z}) +} \ No newline at end of file -- cgit v1.2.3 From 9e513a6d29f2cb060caf58ff5568d7955b96305a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Sat, 17 Mar 2012 12:15:37 +0100 Subject: [vpm] fix half of my existential troubles no need for the clunky repackExistential pack the type of each case instead, since the skolems we've created shouldn't last beyond the case anyway this way we don't end up with fresh, incompatible, skolems for every case, but a neatly packed existential --- src/compiler/scala/reflect/internal/Types.scala | 9 ------ .../tools/nsc/typechecker/PatMatVirtualiser.scala | 10 +++---- .../scala/tools/nsc/typechecker/Typers.scala | 6 +++- test/files/pos/virtpatmat_exist4.scala | 35 ++++++++++++++++++++++ 4 files changed, 45 insertions(+), 15 deletions(-) create mode 100644 test/files/pos/virtpatmat_exist4.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 549c9e4607..2bb19e2b65 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -2617,15 +2617,6 @@ trait Types extends api.Types { self: SymbolTable => } } - // TODO: I don't really know why this happens -- maybe because - // the owner hierarchy changes? the other workaround (besides - // repackExistential) is to explicitly pass expectedTp as the type - // argument for the call to guard, but repacking the existential - // somehow feels more robust - // - // TODO: check if optimization makes a difference, try something else - // if necessary (cache?) 
- /** Repack existential types, otherwise they sometimes get unpacked in the * wrong location (type inference comes up with an unexpected skolem) */ diff --git a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala index 34fefd20fe..de7f03dc62 100644 --- a/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala +++ b/src/compiler/scala/tools/nsc/typechecker/PatMatVirtualiser.scala @@ -1037,7 +1037,7 @@ class Foo(x: Other) { x._1 } // no error in this order // assert(owner ne null); assert(owner ne NoSymbol) def freshSym(pos: Position, tp: Type = NoType, prefix: String = "x") = - NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo repackExistential(tp) + NoSymbol.newTermSymbol(freshName(prefix), pos) setInfo /*repackExistential*/(tp) // codegen relevant to the structure of the translation (how extractors are combined) trait AbsCodegen { @@ -1079,18 +1079,18 @@ class Foo(x: Other) { x._1 } // no error in this order def and(a: Tree, b: Tree): Tree = a AND b // the force is needed mainly to deal with the GADT typing hack (we can't detect it otherwise as tp nor pt need contain an abstract type, we're just casting wildly) - def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = repackExistential(tp) + def _asInstanceOf(t: Tree, tp: Type, force: Boolean = false): Tree = { val tpX = /*repackExistential*/(tp) if (!force && (t.tpe ne NoType) && t.isTyped && typesConform(t.tpe, tpX)) t //{ println("warning: emitted redundant asInstanceOf: "+(t, t.tpe, tp)); t } //.setType(tpX) else gen.mkAsInstanceOf(t, tpX, true, false) } - def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), repackExistential(tp), true, false) - // { val tpX = repackExistential(tp) + def _isInstanceOf(b: Symbol, tp: Type): Tree = gen.mkIsInstanceOf(REF(b), /*repackExistential*/(tp), true, false) + // { val tpX = /*repackExistential*/(tp) // if (typesConform(b.info, tpX)) { println("warning: emitted spurious isInstanceOf: "+(b, tp)); TRUE } // else gen.mkIsInstanceOf(REF(b), tpX, true, false) // } - def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = repackExistential(tp) + def _asInstanceOf(b: Symbol, tp: Type): Tree = { val tpX = /*repackExistential*/(tp) if (typesConform(b.info, tpX)) REF(b) //{ println("warning: emitted redundant asInstanceOf: "+(b, b.info, tp)); REF(b) } //.setType(tpX) else gen.mkAsInstanceOf(REF(b), tpX, true, false) } diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 506e347828..daf4ddd100 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -2140,7 +2140,11 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { def typedCases(cases: List[CaseDef], pattp: Type, pt: Type): List[CaseDef] = cases mapConserve { cdef => - newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt) + val caseTyped = newTyper(context.makeNewScope(cdef, context.owner)).typedCase(cdef, pattp, pt) + if (opt.virtPatmat) { + val tpPacked = packedType(caseTyped, context.owner) + caseTyped setType tpPacked + } else caseTyped } def adaptCase(cdef: CaseDef, mode: Int, tpe: Type): CaseDef = deriveCaseDef(cdef)(adapt(_, mode, tpe)) diff --git a/test/files/pos/virtpatmat_exist4.scala b/test/files/pos/virtpatmat_exist4.scala new file mode 100644 index 0000000000..a04d0e3229 --- /dev/null +++ 
b/test/files/pos/virtpatmat_exist4.scala @@ -0,0 +1,35 @@ +trait Global { + trait Tree + trait Symbol { def foo: Boolean } +} + +trait IMain { self: MemberHandlers => + val global: Global + def handlers: List[MemberHandler] +} + +trait MemberHandlers { + val intp: IMain + import intp.global._ + sealed abstract class MemberHandler(val member: Tree) { + def importedSymbols: List[Symbol] + } +} + +object Test { + var intp: IMain with MemberHandlers = null + + val handlers = intp.handlers + handlers.filterNot(_.importedSymbols.isEmpty).zipWithIndex foreach { + case (handler, idx) => + val (types, terms) = handler.importedSymbols partition (_.foo) + } +} + +object Test2 { + type JClass = java.lang.Class[_] + + def tvarString(bounds: List[AnyRef]) = { + bounds collect { case x: JClass => x } + } +} \ No newline at end of file -- cgit v1.2.3 From d8ba5d091e5641553b438ef9930a6023a2709dcd Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 20 Mar 2012 13:05:17 -0700 Subject: Revert irrefutability commits. Temporary reversion of irrefutability commits in interests of stable milestone. Expect to restore shortly. --- src/compiler/scala/reflect/internal/TreeInfo.scala | 20 +----------- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 34 +++++++++++--------- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/t5589neg.check | 37 ---------------------- test/files/neg/t5589neg.scala | 6 ---- test/files/neg/t5589neg2.check | 9 ------ test/files/neg/t5589neg2.scala | 13 -------- test/files/pos/irrefutable.scala | 22 ------------- test/files/pos/t1336.scala | 10 ------ test/files/pos/t5589.scala | 22 ------------- test/files/run/t4574.check | 2 -- test/files/run/t4574.scala | 13 -------- 12 files changed, 21 insertions(+), 169 deletions(-) delete mode 100644 test/files/neg/t5589neg.check delete mode 100644 test/files/neg/t5589neg.scala delete mode 100644 test/files/neg/t5589neg2.check delete mode 100644 test/files/neg/t5589neg2.scala delete mode 100644 test/files/pos/irrefutable.scala delete mode 100644 test/files/pos/t1336.scala delete mode 100644 test/files/pos/t5589.scala delete mode 100644 test/files/run/t4574.check delete mode 100644 test/files/run/t4574.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala index ce3de94335..769d7a9ed1 100644 --- a/src/compiler/scala/reflect/internal/TreeInfo.scala +++ b/src/compiler/scala/reflect/internal/TreeInfo.scala @@ -17,7 +17,7 @@ abstract class TreeInfo { val global: SymbolTable import global._ - import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass } + import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass } /* Does not seem to be used. Not sure what it does anyway. def isOwnerDefinition(tree: Tree): Boolean = tree match { @@ -312,24 +312,6 @@ abstract class TreeInfo { case _ => false } - /** Is this tree comprised of nothing but identifiers, - * but possibly in bindings or tuples? For instance - * - * foo @ (bar, (baz, quux)) - * - * is a variable pattern; if the structure matches, - * then the remainder is inevitable. - */ - def isVariablePattern(tree: Tree): Boolean = tree match { - case Bind(name, pat) => isVariablePattern(pat) - case Ident(name) => true - case Apply(sel, args) => - ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName) - && (args forall isVariablePattern) - ) - case _ => false - } - /** Is this argument node of the form : _* ? 
*/ def isWildcardStarArg(tree: Tree): Boolean = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 0bc88d1efd..0d2fbc5372 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -262,25 +262,29 @@ abstract class TreeBuilder { else if (stats.length == 1) stats.head else Block(stats.init, stats.last) - def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = { - val cases = List( - CaseDef(condition, EmptyTree, Literal(Constant(true))), - CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) - ) - val matchTree = makeVisitor(cases, false, scrutineeName) - - atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil)) - } - /** Create tree for for-comprehension generator */ def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = { val pat1 = patvarTransformer.transform(pat) val rhs1 = - if (valeq || treeInfo.isVariablePattern(pat)) rhs - else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING) - - if (valeq) ValEq(pos, pat1, rhs1) - else ValFrom(pos, pat1, rhs1) + if (valeq) rhs + else matchVarPattern(pat1) match { + case Some(_) => + rhs + case None => + atPos(rhs.pos) { + Apply( + Select(rhs, nme.filter), + List( + makeVisitor( + List( + CaseDef(pat1.duplicate, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))), + false, + nme.CHECK_IF_REFUTABLE_STRING + ))) + } + } + if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1) } def makeParam(pname: TermName, tpe: Tree) = diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index 73369f09af..ec42d251ff 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1445,7 +1445,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R private def transformApply(tree: Apply): Tree = tree match { case Apply( - Select(qual, nme.filter | nme.withFilter), + Select(qual, nme.filter), List(Function( List(ValDef(_, pname, tpt, _)), Match(_, CaseDef(pat1, _, _) :: _)))) diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check deleted file mode 100644 index b3ff16d7e4..0000000000 --- a/test/files/neg/t5589neg.check +++ /dev/null @@ -1,37 +0,0 @@ -t5589neg.scala:2: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead - def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:2: error: constructor cannot be instantiated to expected type; - found : (T1, T2) - required: String - def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:3: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead - def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:3: error: constructor cannot be instantiated to expected type; - found : (T1, T2) - required: String - def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:4: error: constructor cannot be instantiated to expected type; - found : (T1,) - required: (String, Int) - def f7(x: Either[Int, (String, 
Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:4: error: not found: value y2 - def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:5: error: constructor cannot be instantiated to expected type; - found : (T1, T2, T3) - required: (String, Int) - def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:5: error: not found: value y1 - def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) - ^ -t5589neg.scala:5: error: not found: value y2 - def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) - ^ -two warnings found -7 errors found diff --git a/test/files/neg/t5589neg.scala b/test/files/neg/t5589neg.scala deleted file mode 100644 index 31ff2c3693..0000000000 --- a/test/files/neg/t5589neg.scala +++ /dev/null @@ -1,6 +0,0 @@ -class A { - def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) - def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) - def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) - def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) -} diff --git a/test/files/neg/t5589neg2.check b/test/files/neg/t5589neg2.check deleted file mode 100644 index 6af4955a83..0000000000 --- a/test/files/neg/t5589neg2.check +++ /dev/null @@ -1,9 +0,0 @@ -t5589neg2.scala:7: error: constructor cannot be instantiated to expected type; - found : (T1, T2) - required: String - for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok - ^ -t5589neg2.scala:7: error: not found: value d - for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok - ^ -two errors found diff --git a/test/files/neg/t5589neg2.scala b/test/files/neg/t5589neg2.scala deleted file mode 100644 index b7c7ab7218..0000000000 --- a/test/files/neg/t5589neg2.scala +++ /dev/null @@ -1,13 +0,0 @@ -class A { - def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { - for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok - } - - def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { - for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok - } - - def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { - for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok - } -} \ No newline at end of file diff --git a/test/files/pos/irrefutable.scala b/test/files/pos/irrefutable.scala deleted file mode 100644 index 0a792b644a..0000000000 --- a/test/files/pos/irrefutable.scala +++ /dev/null @@ -1,22 +0,0 @@ -// The test which this should perform but does not -// is that f1 is recognized as irrefutable and f2 is not -// This can be recognized via the generated classes: -// -// A$$anonfun$f1$1.class -// A$$anonfun$f2$1.class -// A$$anonfun$f2$2.class -// -// The extra one in $f2$ is the filter. -// -// !!! Marking with exclamation points so maybe someday -// this test will be finished. 
-class A { - case class Foo[T](x: T) - - def f1(xs: List[Foo[Int]]) = { - for (Foo(x: Int) <- xs) yield x - } - def f2(xs: List[Foo[Any]]) = { - for (Foo(x: Int) <- xs) yield x - } -} diff --git a/test/files/pos/t1336.scala b/test/files/pos/t1336.scala deleted file mode 100644 index 63967985c7..0000000000 --- a/test/files/pos/t1336.scala +++ /dev/null @@ -1,10 +0,0 @@ -object Foo { - def foreach( f : ((Int,Int)) => Unit ) { - println("foreach") - f(1,2) - } - - for( (a,b) <- this ) { - println((a,b)) - } -} diff --git a/test/files/pos/t5589.scala b/test/files/pos/t5589.scala deleted file mode 100644 index 69cbb20391..0000000000 --- a/test/files/pos/t5589.scala +++ /dev/null @@ -1,22 +0,0 @@ -class A { - // First three compile. - def f1(x: Either[Int, String]) = x.right map (y => y) - def f2(x: Either[Int, String]) = for (y <- x.right) yield y - def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) } - // Last one fails. - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) -/** -./a.scala:5: error: constructor cannot be instantiated to expected type; - found : (T1, T2) - required: Either[Nothing,(String, Int)] - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -./a.scala:5: error: not found: value y1 - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -./a.scala:5: error: not found: value y2 - def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) - ^ -three errors found -**/ -} diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check deleted file mode 100644 index a4522fff24..0000000000 --- a/test/files/run/t4574.check +++ /dev/null @@ -1,2 +0,0 @@ -I hereby refute null! -I denounce null as unListLike! diff --git a/test/files/run/t4574.scala b/test/files/run/t4574.scala deleted file mode 100644 index 1dde496aca..0000000000 --- a/test/files/run/t4574.scala +++ /dev/null @@ -1,13 +0,0 @@ -object Test { - val xs: List[(Int, Int)] = List((2, 2), null) - - def expectMatchError[T](msg: String)(body: => T) { - try { body ; assert(false, "Should not succeed.") } - catch { case _: MatchError => println(msg) } - } - - def main(args: Array[String]): Unit = { - expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x ) - expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } ) - } -} -- cgit v1.2.3 From 6d7bcd5818b856d4596b57b7e9f1543b71ed7329 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 20 Mar 2012 14:23:40 -0700 Subject: Lots of tedious warning and tree printing work. Fewer deprecation warnings, prettier trees, prettier symbols, more polished error messages. Oh the interesting people you meet handling warnings, I feel sorry for you all that I get to do it all the time. One of the characters I met invited me into the "Dead Code Society" and that's what I'm doing on Tuesdays now. No of course you haven't, it's a SECRET society. 
--- .../scala/reflect/internal/Definitions.scala | 18 +++- src/compiler/scala/reflect/internal/Kinds.scala | 23 +++-- src/compiler/scala/reflect/internal/Symbols.scala | 57 +++++------ src/compiler/scala/reflect/internal/Types.scala | 23 +++-- src/compiler/scala/reflect/runtime/ToolBoxes.scala | 4 +- .../scala/tools/nsc/ast/NodePrinters.scala | 90 +++++++++++++----- .../backend/icode/analysis/TypeFlowAnalysis.scala | 2 +- .../scala/tools/nsc/backend/opt/Inliners.scala | 2 +- .../tools/nsc/reporters/ConsoleReporter.scala | 4 +- .../scala/tools/nsc/symtab/clr/TypeParser.scala | 24 ++--- .../scala/tools/nsc/transform/UnCurry.scala | 2 +- .../tools/nsc/typechecker/ContextErrors.scala | 2 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- .../tools/nsc/typechecker/SyntheticMethods.scala | 2 +- src/library/scala/Specializable.scala | 12 +-- .../scala/collection/GenTraversableOnce.scala | 2 +- .../collection/mutable/ConcurrentTrieMap.scala | 2 +- src/library/scala/concurrent/DelayedLazyVal.scala | 3 +- src/library/scala/reflect/api/Trees.scala | 3 +- src/library/scala/xml/Elem.scala | 8 +- src/library/scala/xml/XML.scala | 17 ++-- test/files/neg/t1364.check | 2 +- test/files/neg/t1477.check | 2 +- test/files/neg/t2070.check | 3 +- test/files/neg/t4044.check | 9 +- test/files/neg/t5152.check | 6 +- test/files/neg/t708.check | 2 +- test/files/neg/t742.check | 3 +- test/files/neg/tcpoly_override.check | 3 +- test/files/neg/tcpoly_typealias.check | 9 +- test/files/neg/tcpoly_variance_enforce.check | 34 ++++--- test/files/run/existentials-in-compiler.check | 104 ++++++++++----------- test/files/run/reify_ann1a.check | 4 +- test/files/run/reify_ann1b.check | 4 +- 34 files changed, 286 insertions(+), 201 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index bd823c3128..a2dd6fc4c3 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -764,9 +764,25 @@ trait Definitions extends reflect.api.StandardDefinitions { else removeRedundantObjects(parents) } + + def typeStringNoPackage(tp: Type) = + "" + tp stripPrefix tp.typeSymbol.enclosingPackage.fullName + "." + + def briefParentsString(parents: List[Type]) = + normalizedParents(parents) map typeStringNoPackage mkString " with " + def parentsString(parents: List[Type]) = normalizedParents(parents) mkString " with " + def typeParamsString(tp: Type) = tp match { + case PolyType(tparams, _) => tparams map (_.defString) mkString ("[", ",", "]") + case _ => "" + } + def valueParamsString(tp: Type) = tp match { + case MethodType(params, _) => params map (_.defString) mkString ("(", ",", ")") + case _ => "" + } + // members of class java.lang.{ Object, String } lazy val Object_## = enterNewMethod(ObjectClass, nme.HASHHASH, Nil, inttype, FINAL) lazy val Object_== = enterNewMethod(ObjectClass, nme.EQ, anyrefparam, booltype, FINAL) @@ -970,7 +986,7 @@ trait Definitions extends reflect.api.StandardDefinitions { case (_, restpe) => NullaryMethodType(restpe) } - msym setInfoAndEnter polyType(tparams, mtpe) + msym setInfoAndEnter genPolyType(tparams, mtpe) } /** T1 means one type parameter. 
diff --git a/src/compiler/scala/reflect/internal/Kinds.scala b/src/compiler/scala/reflect/internal/Kinds.scala index 23bff950b8..eca63c7c15 100644 --- a/src/compiler/scala/reflect/internal/Kinds.scala +++ b/src/compiler/scala/reflect/internal/Kinds.scala @@ -49,9 +49,15 @@ trait Kinds { private def kindMessage(a: Symbol, p: Symbol)(f: (String, String) => String): String = f(a+qualify(a,p), p+qualify(p,a)) + // Normally it's nicer to print nothing rather than '>: Nothing <: Any' all over + // the place, but here we need it for the message to make sense. private def strictnessMessage(a: Symbol, p: Symbol) = - kindMessage(a, p)("%s's bounds %s are stricter than %s's declared bounds %s".format( - _, a.info, _, p.info)) + kindMessage(a, p)("%s's bounds%s are stricter than %s's declared bounds%s".format( + _, a.info, _, p.info match { + case tb @ TypeBounds(_, _) if tb.isEmptyBounds => " >: Nothing <: Any" + case tb => "" + tb + }) + ) private def varianceMessage(a: Symbol, p: Symbol) = kindMessage(a, p)("%s is %s, but %s is declared %s".format(_, varStr(a), _, varStr(p))) @@ -62,11 +68,16 @@ trait Kinds { _, countAsString(p.typeParams.length)) ) + private def buildMessage(xs: List[SymPair], f: (Symbol, Symbol) => String) = ( + if (xs.isEmpty) "" + else xs map f.tupled mkString ("\n", ", ", "") + ) + def errorMessage(targ: Type, tparam: Symbol): String = ( - (targ+"'s type parameters do not match "+tparam+"'s expected parameters: ") - + (arity map { case (a, p) => arityMessage(a, p) } mkString ", ") - + (variance map { case (a, p) => varianceMessage(a, p) } mkString ", ") - + (strictness map { case (a, p) => strictnessMessage(a, p) } mkString ", ") + (targ+"'s type parameters do not match "+tparam+"'s expected parameters:") + + buildMessage(arity, arityMessage) + + buildMessage(variance, varianceMessage) + + buildMessage(strictness, strictnessMessage) ) } val NoKindErrors = KindErrors(Nil, Nil, Nil) diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 907f7d1237..2ba45c5972 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -2078,47 +2078,32 @@ trait Symbols extends api.Symbols { self: SymbolTable => case s => " in " + s } def fullLocationString: String = toString + locationString - def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>" + def signatureString: String = if (hasRawInfo) infoString(rawInfo) else "<_>" /** String representation of symbol's definition following its name */ final def infoString(tp: Type): String = { - def typeParamsString: String = tp match { - case PolyType(tparams, _) if tparams.nonEmpty => - (tparams map (_.defString)).mkString("[", ",", "]") - case _ => - "" - } - if (isClass) - typeParamsString + " extends " + tp.resultType - else if (isAliasType) - typeParamsString + " = " + tp.resultType - else if (isAbstractType) - typeParamsString + { - tp.resultType match { - case TypeBounds(lo, hi) => - (if (lo.typeSymbol == NothingClass) "" else " >: " + lo) + - (if (hi.typeSymbol == AnyClass) "" else " <: " + hi) - case rtp => - "<: " + rtp - } - } - else if (isModule) - moduleClass.infoString(tp) - else - tp match { - case PolyType(tparams, res) => - typeParamsString + infoString(res) - case NullaryMethodType(res) => - infoString(res) - case MethodType(params, res) => - params.map(_.defString).mkString("(", ",", ")") + infoString(res) - case _ => - ": " + tp + def parents = ( + if (settings.debug.value) 
parentsString(tp.parents) + else briefParentsString(tp.parents) + ) + if (isType) typeParamsString(tp) + ( + if (isClass) " extends " + parents + else if (isAliasType) " = " + tp.resultType + else tp.resultType match { + case rt @ TypeBounds(_, _) => "" + rt + case rt => " <: " + rt } + ) + else if (isModule) moduleClass.infoString(tp) + else tp match { + case PolyType(tparams, res) => typeParamsString(tp) + infoString(res) + case NullaryMethodType(res) => infoString(res) + case MethodType(params, res) => valueParamsString(tp) + infoString(res) + case _ => ": " + tp + } } - def infosString = infos.toString() - + def infosString = infos.toString def debugLocationString = fullLocationString + " " + debugFlagString def debugFlagString = hasFlagsToString(-1L) def hasFlagsToString(mask: Long): String = flagsToString( @@ -2684,7 +2669,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => override def rawInfo: Type = NoType protected def doCookJavaRawInfo() {} override def accessBoundary(base: Symbol): Symbol = RootClass - def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort() + def cloneSymbolImpl(owner: Symbol, newFlags: Long): Symbol = abort("NoSymbol.clone()") override def originalEnclosingMethod = this override def owner: Symbol = diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index 2bb19e2b65..a20853adc8 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -1337,9 +1337,14 @@ trait Types extends api.Types { self: SymbolTable => case TypeBounds(_, _) => that <:< this case _ => lo <:< that && that <:< hi } - def isEmptyBounds = (lo.typeSymbolDirect eq NothingClass) && (hi.typeSymbolDirect eq AnyClass) + private def lowerString = if (emptyLowerBound) "" else " >: " + lo + private def upperString = if (emptyUpperBound) "" else " <: " + hi + private def emptyLowerBound = lo.typeSymbolDirect eq NothingClass + private def emptyUpperBound = hi.typeSymbolDirect eq AnyClass + def isEmptyBounds = emptyLowerBound && emptyUpperBound + // override def isNullable: Boolean = NullClass.tpe <:< lo; - override def safeToString = ">: " + lo + " <: " + hi + override def safeToString = lowerString + upperString override def kind = "TypeBoundsType" } @@ -3321,16 +3326,18 @@ trait Types extends api.Types { self: SymbolTable => * may or may not be poly? (It filched the standard "canonical creator" name.) */ object GenPolyType { - def apply(tparams: List[Symbol], tpe: Type): Type = - if (tparams nonEmpty) typeFun(tparams, tpe) - else tpe // it's okay to be forgiving here + def apply(tparams: List[Symbol], tpe: Type): Type = ( + if (tparams nonEmpty) typeFun(tparams, tpe) + else tpe // it's okay to be forgiving here + ) def unapply(tpe: Type): Option[(List[Symbol], Type)] = tpe match { - case PolyType(tparams, restpe) => Some(tparams, restpe) - case _ => Some(List(), tpe) + case PolyType(tparams, restpe) => Some((tparams, restpe)) + case _ => Some((Nil, tpe)) } } + def genPolyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe) - @deprecated("use GenPolyType(...) instead") + @deprecated("use genPolyType(...) instead", "2.10.0") def polyType(params: List[Symbol], tpe: Type): Type = GenPolyType(params, tpe) /** A creator for anonymous type functions, where the symbol for the type function still needs to be created. 
diff --git a/src/compiler/scala/reflect/runtime/ToolBoxes.scala b/src/compiler/scala/reflect/runtime/ToolBoxes.scala index 8cc4d5f788..28f12b378f 100644 --- a/src/compiler/scala/reflect/runtime/ToolBoxes.scala +++ b/src/compiler/scala/reflect/runtime/ToolBoxes.scala @@ -1,6 +1,7 @@ package scala.reflect package runtime +import scala.tools.nsc import scala.tools.nsc.reporters.Reporter import scala.tools.nsc.reporters.StoreReporter import scala.tools.nsc.reporters.AbstractReporter @@ -21,8 +22,7 @@ trait ToolBoxes extends { self: Universe => class ToolBox(val reporter: Reporter = new StoreReporter, val options: String = "") { - class ToolBoxGlobal(settings: scala.tools.nsc.Settings, reporter: scala.tools.nsc.reporters.Reporter) - extends ReflectGlobal(settings, reporter) { + class ToolBoxGlobal(settings0: nsc.Settings, reporter0: nsc.reporters.Reporter) extends ReflectGlobal(settings0, reporter0) { import definitions._ private val trace = scala.tools.nsc.util.trace when settings.debug.value diff --git a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala index 07e864879d..acbdcd501f 100644 --- a/src/compiler/scala/tools/nsc/ast/NodePrinters.scala +++ b/src/compiler/scala/tools/nsc/ast/NodePrinters.scala @@ -65,7 +65,7 @@ abstract class NodePrinters { def showDefTreeName(tree: DefTree) = showName(tree.name) def showFlags(tree: MemberDef) = flagsToString(tree.symbol.flags | tree.mods.flags) def showLiteral(lit: Literal) = lit.value.escapedStringValue - def showTypeTree(tt: TypeTree) = "" + showAttributes(tt) + def showTypeTree(tt: TypeTree) = "" + emptyOrComment(showType(tt)) def showName(name: Name) = name match { case nme.EMPTY | tpnme.EMPTY => "" case name => "\"" + name + "\"" @@ -74,18 +74,18 @@ abstract class NodePrinters { def showSymbol(tree: Tree): String = { val sym = tree.symbol if (sym == null || sym == NoSymbol) "" - else " sym/owner/tpe=%s %s/%s/%s".format(sym.accurateKindString, sym.name, sym.owner, sym.tpe) + else sym.defString + sym.locationString } def showType(tree: Tree): String = { val tpe = tree.tpe if (tpe == null || tpe == NoType) "" - else " tree.tpe=" + tpe + else "tree.tpe=" + tpe } def showAttributes(tree: Tree): String = { if (infolevel == InfoLevel.Quiet) "" else { - try { showSymbol(tree) + showType(tree) trim } + try { List(showSymbol(tree), showType(tree)) filterNot (_ == "") mkString ", " trim } catch { case ex: Throwable => "sym= " + ex.getMessage } } } @@ -115,13 +115,11 @@ abstract class NodePrinters { case Select(_, _) => prefix0 + "." 
case _ => "" }) - def attrs = showAttributes(tree) match { - case "" => "" - case s => " // " + s - } - prefix + showName(tree.name) + attrs + prefix + showName(tree.name) + emptyOrComment(showAttributes(tree)) } + def emptyOrComment(s: String) = if (s == "") "" else " // " + s + def stringify(tree: Tree): String = { buf.clear() level = 0 @@ -141,7 +139,10 @@ abstract class NodePrinters { buf append " " * level buf append value if (comment != "") { - buf append " // " + if (value != "") + buf append " " + + buf append "// " buf append comment } buf append EOL @@ -179,7 +180,7 @@ abstract class NodePrinters { def applyCommon(tree: Tree, fun: Tree, args: List[Tree]) { printMultiline(tree) { traverse(fun) - traverseList("Nil", _ + " arguments(s)")(args) + traverseList("Nil", "argument")(args) } } @@ -191,17 +192,26 @@ abstract class NodePrinters { indent(body) println(")") } + @inline private def indent[T](body: => T): T = { level += 1 try body finally level -= 1 } - def traverseList(ifEmpty: String, comment: Int => String)(trees: List[Tree]) { + def traverseList(ifEmpty: String, what: String)(trees: List[Tree]) { if (trees.isEmpty) println(ifEmpty) - else - printMultiline("List", comment(trees.length))(trees foreach traverse) + else if (trees.tail.isEmpty) + traverse(trees.head) + else { + printLine("", trees.length + " " + what + "s") + trees foreach traverse + } + } + + def printSingle(tree: Tree, name: Name) { + println(tree.printingPrefix + "(" + showName(name) + ")" + showAttributes(tree)) } def traverse(tree: Tree) { @@ -210,16 +220,44 @@ abstract class NodePrinters { case ApplyDynamic(fun, args) => applyCommon(tree, fun, args) case Apply(fun, args) => applyCommon(tree, fun, args) + case Throw(Ident(name)) => + printSingle(tree, name) + + case Function(vparams, body) => + printMultiline(tree) { + traverseList("()", "parameter")(vparams) + traverse(body) + } + case Try(block, catches, finalizer) => + printMultiline(tree) { + traverse(block) + traverseList("{}", "case")(catches) + if (finalizer ne EmptyTree) + traverse(finalizer) + } + + case Match(selector, cases) => + printMultiline(tree) { + traverse(selector) + traverseList("", "case")(cases) + } + case CaseDef(pat, guard, body) => + printMultiline(tree) { + traverse(pat) + if (guard ne EmptyTree) + traverse(guard) + traverse(body) + } case Block(stats, expr) => printMultiline(tree) { - traverseList("{}", _ + " statement(s)")(stats) + traverseList("{}", "statement")(stats) traverse(expr) } case cd @ ClassDef(mods, name, tparams, impl) => printMultiline(tree) { printModifiers(cd) println(showDefTreeName(cd)) - traverseList("[]", _ + " type parameter(s)")(tparams) + traverseList("[]", "type parameter")(tparams) traverse(impl) } case md @ ModuleDef(mods, name, impl) => @@ -232,14 +270,16 @@ abstract class NodePrinters { printMultiline(tree) { printModifiers(dd) println(showDefTreeName(dd)) - traverseList("[]", _ + " type parameter(s)")(tparams) + traverseList("[]", "type parameter")(tparams) vparamss match { case Nil => println("Nil") case Nil :: Nil => println("List(Nil)") - case xss => - printMultiline("List", xss.length + " parameter list(s)") { - xss foreach (xs => traverseList("()", _ + " parameter(s)")(xs)) - } + case ps :: Nil => + printLine("", "1 parameter list") + ps foreach traverse + case pss => + printLine("", pss.length + " parameter lists") + pss foreach (ps => traverseList("()", "parameter")(ps)) } traverse(tpt) traverse(rhs) @@ -268,14 +308,14 @@ abstract class NodePrinters { } printLine(ps0 mkString ", ", "parents") 
traverse(self) - traverseList("{}", _ + " statements in body")(body) + traverseList("{}", "statement")(body) } case This(qual) => - println("This(\"" + showName(qual) + "\")" + showAttributes(tree)) + printSingle(tree, qual) case TypeApply(fun, args) => printMultiline(tree) { traverse(fun) - traverseList("[]", _ + " type argument(s)")(args) + traverseList("[]", "type argument")(args) } case tt @ TypeTree() => println(showTypeTree(tt)) @@ -296,7 +336,7 @@ abstract class NodePrinters { printMultiline(tree) { printModifiers(td) println(showDefTreeName(td)) - traverseList("[]", _ + " type parameter(s)")(tparams) + traverseList("[]", "type parameter")(tparams) traverse(rhs) } diff --git a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala index 877c51ebc1..d31eafff48 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/analysis/TypeFlowAnalysis.scala @@ -645,7 +645,7 @@ abstract class TypeFlowAnalysis { } while(!done) lastInstruction.clear() - for(b <- isOnPerimeter; val lastIns = b.toList.reverse find isOnWatchlist) { + for (b <- isOnPerimeter; lastIns = b.toList.reverse find isOnWatchlist) { lastInstruction += (b -> lastIns.get.asInstanceOf[opcodes.CALL_METHOD]) } diff --git a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala index a734b2b92b..dfe9081ee5 100644 --- a/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala +++ b/src/compiler/scala/tools/nsc/backend/opt/Inliners.scala @@ -262,7 +262,7 @@ abstract class Inliners extends SubComponent { def inlineWithoutTFA(inputBlocks: Traversable[BasicBlock], callsites: Function1[BasicBlock, List[opcodes.CALL_METHOD]]): Int = { var inlineCount = 0 import scala.util.control.Breaks._ - for(x <- inputBlocks; val easyCake = callsites(x); if easyCake.nonEmpty) { + for(x <- inputBlocks; easyCake = callsites(x); if easyCake.nonEmpty) { breakable { for(ocm <- easyCake) { assert(ocm.method.isEffectivelyFinal && ocm.method.owner.isEffectivelyFinal) diff --git a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala index c76a04c6ba..f5335fb0f5 100644 --- a/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala +++ b/src/compiler/scala/tools/nsc/reporters/ConsoleReporter.scala @@ -8,7 +8,7 @@ package reporters import java.io.{ BufferedReader, IOException, PrintWriter } import util._ -import scala.tools.util.StringOps.countElementsAsString +import scala.tools.util.StringOps /** * This class implements a Reporter that displays messages on a text @@ -40,7 +40,7 @@ class ConsoleReporter(val settings: Settings, reader: BufferedReader, writer: Pr * @return ... */ private def getCountString(severity: Severity): String = - countElementsAsString((severity).count, label(severity)) + StringOps.countElementsAsString((severity).count, label(severity)) /** Prints the message. 
*/ def printMessage(msg: String) { writer.print(msg + "\n"); writer.flush() } diff --git a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala index e11a5a4ad9..4b847fa94a 100644 --- a/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala +++ b/src/compiler/scala/tools/nsc/symtab/clr/TypeParser.scala @@ -108,7 +108,7 @@ abstract class TypeParser { val method = new ConstructorInfo(declType, attrs, Array[MSILType]()) val flags = Flags.JAVA val owner = clazz - val methodSym = owner.newMethod(NoPosition, nme.CONSTRUCTOR).setFlag(flags) + val methodSym = owner.newMethod(nme.CONSTRUCTOR, NoPosition, flags) val rettype = clazz.tpe val mtype = methodType(Array[MSILType](), rettype); val mInfo = mtype(methodSym) @@ -224,14 +224,14 @@ abstract class TypeParser { if (canBeTakenAddressOf) { clazzBoxed.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata - else polyType(ownTypeParams, classInfoAsInMetadata) ) + else genPolyType(ownTypeParams, classInfoAsInMetadata) ) clazzBoxed.setFlag(flags) val rawValueInfoType = ClassInfoType(definitions.anyvalparam, instanceDefs, clazz) clazz.setInfo( if (ownTypeParams.isEmpty) rawValueInfoType - else polyType(ownTypeParams, rawValueInfoType) ) + else genPolyType(ownTypeParams, rawValueInfoType) ) } else { clazz.setInfo( if (ownTypeParams.isEmpty) classInfoAsInMetadata - else polyType(ownTypeParams, classInfoAsInMetadata) ) + else genPolyType(ownTypeParams, classInfoAsInMetadata) ) } // TODO I don't remember if statics.setInfo and staticModule.setInfo should also know about type params @@ -284,7 +284,7 @@ abstract class TypeParser { else getCLRType(field.FieldType) val owner = if (field.IsStatic()) statics else clazz; - val sym = owner.newValue(NoPosition, name).setFlag(flags).setInfo(fieldType); + val sym = owner.newValue(name, NoPosition, flags).setInfo(fieldType); // TODO: set private within!!! 
-> look at typechecker/Namers.scala (if (field.IsStatic()) staticDefs else instanceDefs).enter(sym); clrTypes.fields(sym) = field; @@ -313,7 +313,7 @@ abstract class TypeParser { val name: Name = if (gparamsLength == 0) prop.Name else nme.apply; val flags = translateAttributes(getter); val owner: Symbol = if (getter.IsStatic) statics else clazz; - val methodSym = owner.newMethod(NoPosition, name).setFlag(flags) + val methodSym = owner.newMethod(name, NoPosition, flags) val mtype: Type = if (gparamsLength == 0) NullaryMethodType(propType) // .NET properties can't be polymorphic else methodType(getter, getter.ReturnType)(methodSym) methodSym.setInfo(mtype); @@ -337,7 +337,7 @@ abstract class TypeParser { val flags = translateAttributes(setter); val mtype = methodType(setter, definitions.UnitClass.tpe); val owner: Symbol = if (setter.IsStatic) statics else clazz; - val methodSym = owner.newMethod(NoPosition, name).setFlag(flags) + val methodSym = owner.newMethod(name, NoPosition, flags) methodSym.setInfo(mtype(methodSym)) methodSym.setFlag(Flags.ACCESSOR); (if (setter.IsStatic) staticDefs else instanceDefs).enter(methodSym); @@ -424,14 +424,14 @@ abstract class TypeParser { val flags = Flags.JAVA | Flags.FINAL for (cmpName <- ENUM_CMP_NAMES) { - val enumCmp = clazz.newMethod(NoPosition, cmpName) + val enumCmp = clazz.newMethod(cmpName) val enumCmpType = JavaMethodType(enumCmp.newSyntheticValueParams(List(clazz.tpe)), definitions.BooleanClass.tpe) enumCmp.setFlag(flags).setInfo(enumCmpType) instanceDefs.enter(enumCmp) } for (bitLogName <- ENUM_BIT_LOG_NAMES) { - val enumBitLog = clazz.newMethod(NoPosition, bitLogName) + val enumBitLog = clazz.newMethod(bitLogName) val enumBitLogType = JavaMethodType(enumBitLog.newSyntheticValueParams(List(clazz.tpe)), clazz.tpe /* was classInfo, infinite typer */) enumBitLog.setFlag(flags).setInfo(enumBitLogType) instanceDefs.enter(enumBitLog) @@ -469,7 +469,7 @@ abstract class TypeParser { val flags = translateAttributes(method); val owner = if (method.IsStatic()) statics else clazz; - val methodSym = owner.newMethod(NoPosition, getName(method)).setFlag(flags) + val methodSym = owner.newMethod(getName(method), NoPosition, flags) /* START CLR generics (snippet 3) */ val newMethodTParams = populateMethodTParams(method, methodSym) /* END CLR generics (snippet 3) */ @@ -480,7 +480,7 @@ abstract class TypeParser { val mtype = methodType(method, rettype); if (mtype == null) return; /* START CLR generics (snippet 4) */ - val mInfo = if (method.IsGeneric) polyType(newMethodTParams, mtype(methodSym)) + val mInfo = if (method.IsGeneric) genPolyType(newMethodTParams, mtype(methodSym)) else mtype(methodSym) /* END CLR generics (snippet 4) */ /* START CLR non-generics (snippet 4) @@ -500,7 +500,7 @@ abstract class TypeParser { } private def createMethod(name: Name, flags: Long, mtype: Symbol => Type, method: MethodInfo, statik: Boolean): Symbol = { - val methodSym: Symbol = (if (statik) statics else clazz).newMethod(NoPosition, name) + val methodSym: Symbol = (if (statik) statics else clazz).newMethod(name) methodSym.setFlag(flags).setInfo(mtype(methodSym)) (if (statik) staticDefs else instanceDefs).enter(methodSym) if (method != null) diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index 03bef83a90..e54e0289bb 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -168,7 +168,7 @@ abstract class UnCurry extends InfoTransform 
private def nonLocalReturnTry(body: Tree, key: Symbol, meth: Symbol) = { localTyper typed { val extpe = nonLocalReturnExceptionType(meth.tpe.finalResultType) - val ex = meth.newValue(body.pos, nme.ex) setInfo extpe + val ex = meth.newValue(nme.ex, body.pos) setInfo extpe val pat = gen.mkBindForCase(ex, NonLocalReturnControlClass, List(meth.tpe.finalResultType)) val rhs = ( IF ((ex DOT nme.key)() OBJ_EQ Ident(key)) diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala index ed9fee986f..e37f5784c9 100644 --- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala +++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala @@ -684,7 +684,7 @@ trait ContextErrors { def errMsg = { val location = if (sym.isClassConstructor) owner0 else pre.widen - underlying(sym).fullLocationString + " cannot be accessed in " + + underlyingSymbol(sym).fullLocationString + " cannot be accessed in " + location + explanation } NormalTypeError(tree, errMsg, ErrorKinds.Access) diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index ec42d251ff..fa19f380fd 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -266,7 +266,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R (if (showLocation) sym1.locationString + (if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1) - else if (sym1.isAbstractType) " with bounds "+self.memberInfo(sym1) + else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1) else if (sym1.isTerm) " of type "+self.memberInfo(sym1) else "") else "") diff --git a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala index 2f4eff30d2..da87d38ab0 100644 --- a/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala +++ b/src/compiler/scala/tools/nsc/typechecker/SyntheticMethods.scala @@ -136,7 +136,7 @@ trait SyntheticMethods extends ast.TreeDSL { * where that is the given methods first parameter. */ def thatTest(eqmeth: Symbol): Tree = - gen.mkIsInstanceOf(Ident(eqmeth.firstParam), typeCaseType(clazz), true, false) + gen.mkIsInstanceOf(Ident(eqmeth.firstParam), classExistentialType(clazz), true, false) /** (that.asInstanceOf[this.C]) * where that is the given methods first parameter. diff --git a/src/library/scala/Specializable.scala b/src/library/scala/Specializable.scala index 67126b3069..d5e22195d2 100644 --- a/src/library/scala/Specializable.scala +++ b/src/library/scala/Specializable.scala @@ -20,10 +20,10 @@ object Specializable { // Smuggle a list of types by way of a tuple upon which Group is parameterized. 
class Group[T >: Null](value: T) extends SpecializedGroup { } - final val Primitives = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit) - final val Everything = new Group(Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef) - final val Bits32AndUp = new Group(Int, Long, Float, Double) - final val Integral = new Group(Byte, Short, Int, Long, Char) - final val AllNumeric = new Group(Byte, Short, Int, Long, Char, Float, Double) - final val BestOfBreed = new Group(Int, Double, Boolean, Unit, AnyRef) + final val Primitives = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit)) + final val Everything = new Group((Byte, Short, Int, Long, Char, Float, Double, Boolean, Unit, AnyRef)) + final val Bits32AndUp = new Group((Int, Long, Float, Double)) + final val Integral = new Group((Byte, Short, Int, Long, Char)) + final val AllNumeric = new Group((Byte, Short, Int, Long, Char, Float, Double)) + final val BestOfBreed = new Group((Int, Double, Boolean, Unit, AnyRef)) } diff --git a/src/library/scala/collection/GenTraversableOnce.scala b/src/library/scala/collection/GenTraversableOnce.scala index f18e2ab6bb..019d3d0785 100644 --- a/src/library/scala/collection/GenTraversableOnce.scala +++ b/src/library/scala/collection/GenTraversableOnce.scala @@ -124,7 +124,7 @@ trait GenTraversableOnce[+A] extends Any { * scala> val b = (a /:\ 5)(_+_) * b: Int = 15 * }}}*/ - @deprecated("use fold instead") + @deprecated("use fold instead", "2.10.0") def /:\[A1 >: A](z: A1)(op: (A1, A1) => A1): A1 = fold(z)(op) /** Applies a binary operator to a start value and all elements of this $coll, diff --git a/src/library/scala/collection/mutable/ConcurrentTrieMap.scala b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala index 1a44c8e423..cfe1b1950d 100644 --- a/src/library/scala/collection/mutable/ConcurrentTrieMap.scala +++ b/src/library/scala/collection/mutable/ConcurrentTrieMap.scala @@ -1027,7 +1027,7 @@ private[collection] class ConcurrentTrieMapIterator[K, V](var level: Int, privat Seq(this) } - def printDebug { + def printDebug() { println("ctrie iterator") println(stackpos.mkString(",")) println("depth: " + depth) diff --git a/src/library/scala/concurrent/DelayedLazyVal.scala b/src/library/scala/concurrent/DelayedLazyVal.scala index a17153bad5..96a66d83b6 100644 --- a/src/library/scala/concurrent/DelayedLazyVal.scala +++ b/src/library/scala/concurrent/DelayedLazyVal.scala @@ -40,9 +40,8 @@ class DelayedLazyVal[T](f: () => T, body: => Unit) { def apply(): T = if (isDone) complete else f() // TODO replace with scala.concurrent.future { ... } - ops.future { + future { body _isDone = true } - } diff --git a/src/library/scala/reflect/api/Trees.scala b/src/library/scala/reflect/api/Trees.scala index a8276dc853..466c380cef 100644 --- a/src/library/scala/reflect/api/Trees.scala +++ b/src/library/scala/reflect/api/Trees.scala @@ -165,11 +165,12 @@ trait Trees { self: Universe => def foreach(f: Tree => Unit) { new ForeachTreeTraverser(f).traverse(this) } /** Find all subtrees matching predicate `p` */ - def filter(f: Tree => Boolean): List[Tree] = { + def withFilter(f: Tree => Boolean): List[Tree] = { val ft = new FilterTreeTraverser(f) ft.traverse(this) ft.hits.toList } + def filter(f: Tree => Boolean): List[Tree] = withFilter(f) /** Returns optionally first tree (in a preorder traversal) which satisfies predicate `p`, * or None if none exists. 
diff --git a/src/library/scala/xml/Elem.scala b/src/library/scala/xml/Elem.scala index cc244a5b88..5b6b9f2bb9 100755 --- a/src/library/scala/xml/Elem.scala +++ b/src/library/scala/xml/Elem.scala @@ -23,12 +23,12 @@ object Elem { * @deprecated This factory method is retained for backward compatibility; please use the other one, with which you * can specify your own preference for minimizeEmpty. */ - @deprecated - def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem = + @deprecated("Use the other apply method in this object", "2.10.0") + def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, child: Node*): Elem = apply(prefix, label, attributes, scope, child.isEmpty, child: _*) - def apply(prefix: String,label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem = - new Elem(prefix,label,attributes,scope, minimizeEmpty, child:_*) + def apply(prefix: String, label: String, attributes: MetaData, scope: NamespaceBinding, minimizeEmpty: Boolean, child: Node*): Elem = + new Elem(prefix, label, attributes, scope, minimizeEmpty, child: _*) def unapplySeq(n: Node) = n match { case _: SpecialNode | _: Group => None diff --git a/src/library/scala/xml/XML.scala b/src/library/scala/xml/XML.scala index 4beba91899..f6955c6612 100755 --- a/src/library/scala/xml/XML.scala +++ b/src/library/scala/xml/XML.scala @@ -15,8 +15,7 @@ import java.io.{ InputStream, Reader, StringReader, Writer } import java.nio.channels.Channels import scala.util.control.Exception.ultimately -object Source -{ +object Source { def fromFile(file: File) = new InputSource(new FileInputStream(file)) def fromFile(fd: FileDescriptor) = new InputSource(new FileInputStream(fd)) def fromFile(name: String) = new InputSource(new FileInputStream(name)) @@ -31,13 +30,18 @@ object Source * Governs how empty elements (i.e. those without child elements) should be serialized. */ object MinimizeMode extends Enumeration { - /** Minimize empty tags if they were originally empty when parsed, or if they were constructed with [[scala.xml.Elem]]`#minimizeEmpty` == true */ + /** Minimize empty tags if they were originally empty when parsed, or if they were constructed + * with [[scala.xml.Elem]]`#minimizeEmpty` == true + */ val Default = Value - /** Always minimize empty tags. Note that this may be problematic for XHTML, in which case [[scala.xml.Xhtml]]`#toXhtml` should be used instead. */ + /** Always minimize empty tags. Note that this may be problematic for XHTML, in which + * case [[scala.xml.Xhtml]]`#toXhtml` should be used instead. + */ val Always = Value - /** Never minimize empty tags. */ + /** Never minimize empty tags. 
+ */ val Never = Value } @@ -50,8 +54,7 @@ import Source._ * @author Burak Emir * @version 1.0, 25/04/2005 */ -object XML extends XMLLoader[Elem] -{ +object XML extends XMLLoader[Elem] { val xml = "xml" val xmlns = "xmlns" val namespace = "http://www.w3.org/XML/1998/namespace" diff --git a/test/files/neg/t1364.check b/test/files/neg/t1364.check index 78375333c2..cb8803abdc 100644 --- a/test/files/neg/t1364.check +++ b/test/files/neg/t1364.check @@ -1,4 +1,4 @@ -t1364.scala:9: error: overriding type T in trait A with bounds >: Nothing <: AnyRef{type S[-U]}; +t1364.scala:9: error: overriding type T in trait A with bounds <: AnyRef{type S[-U]}; type T has incompatible type type T = { type S[U] = U } ^ diff --git a/test/files/neg/t1477.check b/test/files/neg/t1477.check index e497637857..72bffa3270 100644 --- a/test/files/neg/t1477.check +++ b/test/files/neg/t1477.check @@ -1,4 +1,4 @@ -t1477.scala:13: error: overriding type V in trait C with bounds >: Nothing <: Middle.this.D; +t1477.scala:13: error: overriding type V in trait C with bounds <: Middle.this.D; type V is a volatile type; cannot override a type with non-volatile upper bound type V <: (D with U) ^ diff --git a/test/files/neg/t2070.check b/test/files/neg/t2070.check index bd049409a8..ef1d08f7b7 100644 --- a/test/files/neg/t2070.check +++ b/test/files/neg/t2070.check @@ -1,5 +1,6 @@ t2070.scala:8: error: The kind of trait T does not conform to the expected kind of type T[X] in trait A. -t2070.B.T's type parameters do not match type T's expected parameters: type X (in object B) has one type parameter, but type X (in trait A) has none +t2070.B.T's type parameters do not match type T's expected parameters: +type X (in object B) has one type parameter, but type X (in trait A) has none trait T[X[_]] ^ one error found diff --git a/test/files/neg/t4044.check b/test/files/neg/t4044.check index 75dcf63bfe..41a04f69b9 100644 --- a/test/files/neg/t4044.check +++ b/test/files/neg/t4044.check @@ -2,15 +2,18 @@ t4044.scala:9: error: AnyRef takes no type parameters, expected: one M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *) ^ t4044.scala:9: error: kinds of the type arguments () do not conform to the expected kinds of the type parameters (type N). -'s type parameters do not match type N's expected parameters: has no type parameters, but type N has one +'s type parameters do not match type N's expected parameters: + has no type parameters, but type N has one M[AnyRef] // error, (AnyRef :: *) not kind-conformant to (N :: * -> * -> *) ^ t4044.scala:11: error: kinds of the type arguments (Test.A) do not conform to the expected kinds of the type parameters (type N). -Test.A's type parameters do not match type N's expected parameters: type _ has no type parameters, but type O has one +Test.A's type parameters do not match type N's expected parameters: +type _ has no type parameters, but type O has one M[A] // error, (A :: (* -> *) not kind-conformant to (N :: * -> * -> *) ^ t4044.scala:15: error: kinds of the type arguments (Test.C) do not conform to the expected kinds of the type parameters (type N). 
-Test.C's type parameters do not match type N's expected parameters: type _ has one type parameter, but type _ has none +Test.C's type parameters do not match type N's expected parameters: +type _ has one type parameter, but type _ has none M[C] // error, (C :: (* -> * -> * -> *) not kind-conformant to (N :: * -> * -> *) ^ four errors found diff --git a/test/files/neg/t5152.check b/test/files/neg/t5152.check index 80e0141b64..fd510dbae0 100644 --- a/test/files/neg/t5152.check +++ b/test/files/neg/t5152.check @@ -1,9 +1,11 @@ t5152.scala:7: error: kinds of the type arguments (Test.B) do not conform to the expected kinds of the type parameters (type E) in class A. -Test.B's type parameters do not match type E's expected parameters: type E has one type parameter, but type _ has none +Test.B's type parameters do not match type E's expected parameters: +type E has one type parameter, but type _ has none class B[E[_]] extends A[B] { } // B is depth 2 but A requires 1 ^ t5152.scala:11: error: kinds of the type arguments (Test.B1) do not conform to the expected kinds of the type parameters (type E) in class A1. -Test.B1's type parameters do not match type E's expected parameters: type _ has no type parameters, but type G has one +Test.B1's type parameters do not match type E's expected parameters: +type _ has no type parameters, but type G has one class B1[E[_]] extends A1[B1] // B1 is depth 2 but A1 requires 3 ^ two errors found diff --git a/test/files/neg/t708.check b/test/files/neg/t708.check index 15a9c9ed93..4983aab613 100644 --- a/test/files/neg/t708.check +++ b/test/files/neg/t708.check @@ -1,4 +1,4 @@ -t708.scala:8: error: overriding type S in trait X with bounds >: Nothing <: A.this.T; +t708.scala:8: error: overriding type S in trait X with bounds <: A.this.T; type S has incompatible type override private[A] type S = Any; ^ diff --git a/test/files/neg/t742.check b/test/files/neg/t742.check index f587948ef1..d355715442 100644 --- a/test/files/neg/t742.check +++ b/test/files/neg/t742.check @@ -1,5 +1,6 @@ t742.scala:5: error: kinds of the type arguments (Crash._1,Crash._2,Any) do not conform to the expected kinds of the type parameters (type m,type n,type z). -Crash._1's type parameters do not match type m's expected parameters: type s1 has one type parameter, but type n has two +Crash._1's type parameters do not match type m's expected parameters: +type s1 has one type parameter, but type n has two type p = mul[_1, _2, Any] // mul[_1, _1, Any] needs -Yrecursion ^ one error found diff --git a/test/files/neg/tcpoly_override.check b/test/files/neg/tcpoly_override.check index 95529329e8..dbc3ff9461 100644 --- a/test/files/neg/tcpoly_override.check +++ b/test/files/neg/tcpoly_override.check @@ -1,5 +1,6 @@ tcpoly_override.scala:9: error: The kind of type T does not conform to the expected kind of type T[_] in trait A. -C.this.T's type parameters do not match type T's expected parameters: type T (in class C) has no type parameters, but type T (in trait A) has one +C.this.T's type parameters do not match type T's expected parameters: +type T (in class C) has no type parameters, but type T (in trait A) has one type T = B // This compiles well (@M: ... 
but it shouldn't) ^ one error found diff --git a/test/files/neg/tcpoly_typealias.check b/test/files/neg/tcpoly_typealias.check index 670add2c04..4beac0e440 100644 --- a/test/files/neg/tcpoly_typealias.check +++ b/test/files/neg/tcpoly_typealias.check @@ -1,13 +1,16 @@ tcpoly_typealias.scala:37: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A. -BInv.this.m's type parameters do not match type m's expected parameters: type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant +BInv.this.m's type parameters do not match type m's expected parameters: +type x (in trait BInv) is invariant, but type x (in trait A) is declared covariant type m[x] = FooCov[x] // error: invariant x in alias def ^ tcpoly_typealias.scala:41: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A. -BCon.this.m's type parameters do not match type m's expected parameters: type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant +BCon.this.m's type parameters do not match type m's expected parameters: +type x (in trait BCon) is contravariant, but type x (in trait A) is declared covariant type m[-x] = FooCon[x] // error: contravariant x ^ tcpoly_typealias.scala:45: error: The kind of type m does not conform to the expected kind of type m[+x] in trait A. -BBound.this.m's type parameters do not match type m's expected parameters: type x (in trait BBound)'s bounds >: Nothing <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any +BBound.this.m's type parameters do not match type m's expected parameters: +type x (in trait BBound)'s bounds <: String are stricter than type x (in trait A)'s declared bounds >: Nothing <: Any type m[+x <: String] = FooBound[x] // error: x with stricter bound ^ three errors found diff --git a/test/files/neg/tcpoly_variance_enforce.check b/test/files/neg/tcpoly_variance_enforce.check index 44b5b2c15c..3299cc3435 100644 --- a/test/files/neg/tcpoly_variance_enforce.check +++ b/test/files/neg/tcpoly_variance_enforce.check @@ -1,45 +1,57 @@ tcpoly_variance_enforce.scala:15: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll. -FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant +FooInvar's type parameters do not match type m's expected parameters: +type x (in class FooInvar) is invariant, but type x is declared covariant object fcollinv extends coll[FooInvar] // error ^ tcpoly_variance_enforce.scala:16: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll. -FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant +FooContra's type parameters do not match type m's expected parameters: +type x (in class FooContra) is contravariant, but type x is declared covariant object fcollcon extends coll[FooContra] // error ^ tcpoly_variance_enforce.scala:17: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll. 
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any +FooString's type parameters do not match type m's expected parameters: +type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any object fcollwb extends coll[FooString] // error ^ tcpoly_variance_enforce.scala:19: error: kinds of the type arguments (FooCov) do not conform to the expected kinds of the type parameters (type m) in trait coll2. -FooCov's type parameters do not match type m's expected parameters: type x (in class FooCov) is covariant, but type x is declared contravariant +FooCov's type parameters do not match type m's expected parameters: +type x (in class FooCov) is covariant, but type x is declared contravariant object fcoll2ok extends coll2[FooCov] // error ^ tcpoly_variance_enforce.scala:20: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll2. -FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared contravariant +FooInvar's type parameters do not match type m's expected parameters: +type x (in class FooInvar) is invariant, but type x is declared contravariant object fcoll2inv extends coll2[FooInvar] // error ^ tcpoly_variance_enforce.scala:22: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll2. -FooString's type parameters do not match type m's expected parameters: type x (in class FooString) is covariant, but type x is declared contravarianttype x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any +FooString's type parameters do not match type m's expected parameters: +type x (in class FooString) is covariant, but type x is declared contravariant +type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any object fcoll2wb extends coll2[FooString] // error ^ tcpoly_variance_enforce.scala:27: error: kinds of the type arguments (FooString) do not conform to the expected kinds of the type parameters (type m) in trait coll3. -FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: Any +FooString's type parameters do not match type m's expected parameters: +type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds >: Nothing <: Any object fcoll3wb extends coll3[FooString] // error ^ tcpoly_variance_enforce.scala:30: error: kinds of the type arguments (FooString,Int) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4. -FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y +FooString's type parameters do not match type m's expected parameters: +type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y object fcoll4_1 extends coll4[FooString, Int] // error ^ tcpoly_variance_enforce.scala:31: error: kinds of the type arguments (FooString,Any) do not conform to the expected kinds of the type parameters (type m,type y) in trait coll4. 
-FooString's type parameters do not match type m's expected parameters: type x (in class FooString)'s bounds >: Nothing <: String are stricter than type x's declared bounds >: Nothing <: y +FooString's type parameters do not match type m's expected parameters: +type x (in class FooString)'s bounds <: String are stricter than type x's declared bounds <: y object fcoll4_2 extends coll4[FooString, Any] // error ^ tcpoly_variance_enforce.scala:37: error: kinds of the type arguments (FooInvar) do not conform to the expected kinds of the type parameters (type m) in trait coll. -FooInvar's type parameters do not match type m's expected parameters: type x (in class FooInvar) is invariant, but type x is declared covariant +FooInvar's type parameters do not match type m's expected parameters: +type x (in class FooInvar) is invariant, but type x is declared covariant def x: coll[FooInvar] = sys.error("foo") // error ^ tcpoly_variance_enforce.scala:38: error: kinds of the type arguments (FooContra) do not conform to the expected kinds of the type parameters (type m) in trait coll. -FooContra's type parameters do not match type m's expected parameters: type x (in class FooContra) is contravariant, but type x is declared covariant +FooContra's type parameters do not match type m's expected parameters: +type x (in class FooContra) is contravariant, but type x is declared covariant def y: coll[FooContra] = sys.error("foo") // error ^ 11 errors found diff --git a/test/files/run/existentials-in-compiler.check b/test/files/run/existentials-in-compiler.check index c8040a4cb1..83e3cdf435 100644 --- a/test/files/run/existentials-in-compiler.check +++ b/test/files/run/existentials-in-compiler.check @@ -1,156 +1,156 @@ -abstract trait Bippy[A <: AnyRef,B] extends Object +abstract trait Bippy[A <: AnyRef, B] extends Object extest.Bippy[_ <: AnyRef, _] -abstract trait BippyBud[A <: AnyRef,B,C <: List[A]] extends Object +abstract trait BippyBud[A <: AnyRef, B, C <: List[A]] extends Object extest.BippyBud[A,B,C] forSome { A <: AnyRef; B; C <: List[A] } -abstract trait BippyLike[A <: AnyRef,B <: List[A],This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object +abstract trait BippyLike[A <: AnyRef, B <: List[A], This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B]] extends Object extest.BippyLike[A,B,This] forSome { A <: AnyRef; B <: List[A]; This <: extest.BippyLike[A,B,This] with extest.Bippy[A,B] } -abstract trait Contra[-A >: AnyRef,-B] extends Object +abstract trait Contra[-A >: AnyRef, -B] extends Object extest.Contra[_ >: AnyRef, _] -abstract trait ContraLike[-A >: AnyRef,-B >: List[A]] extends Object +abstract trait ContraLike[-A >: AnyRef, -B >: List[A]] extends Object extest.ContraLike[A,B] forSome { -A >: AnyRef; -B >: List[A] } -abstract trait Cov01[+A <: AnyRef,+B] extends Object +abstract trait Cov01[+A <: AnyRef, +B] extends Object extest.Cov01[_ <: AnyRef, _] -abstract trait Cov02[+A <: AnyRef,B] extends Object +abstract trait Cov02[+A <: AnyRef, B] extends Object extest.Cov02[_ <: AnyRef, _] -abstract trait Cov03[+A <: AnyRef,-B] extends Object +abstract trait Cov03[+A <: AnyRef, -B] extends Object extest.Cov03[_ <: AnyRef, _] -abstract trait Cov04[A <: AnyRef,+B] extends Object +abstract trait Cov04[A <: AnyRef, +B] extends Object extest.Cov04[_ <: AnyRef, _] -abstract trait Cov05[A <: AnyRef,B] extends Object +abstract trait Cov05[A <: AnyRef, B] extends Object extest.Cov05[_ <: AnyRef, _] -abstract trait Cov06[A <: AnyRef,-B] extends Object +abstract trait Cov06[A <: AnyRef, 
-B] extends Object extest.Cov06[_ <: AnyRef, _] -abstract trait Cov07[-A <: AnyRef,+B] extends Object +abstract trait Cov07[-A <: AnyRef, +B] extends Object extest.Cov07[_ <: AnyRef, _] -abstract trait Cov08[-A <: AnyRef,B] extends Object +abstract trait Cov08[-A <: AnyRef, B] extends Object extest.Cov08[_ <: AnyRef, _] -abstract trait Cov09[-A <: AnyRef,-B] extends Object +abstract trait Cov09[-A <: AnyRef, -B] extends Object extest.Cov09[_ <: AnyRef, _] -abstract trait Cov11[+A <: AnyRef,+B <: List[_]] extends Object +abstract trait Cov11[+A <: AnyRef, +B <: List[_]] extends Object extest.Cov11[_ <: AnyRef, _ <: List[_]] -abstract trait Cov12[+A <: AnyRef,B <: List[_]] extends Object +abstract trait Cov12[+A <: AnyRef, B <: List[_]] extends Object extest.Cov12[_ <: AnyRef, _ <: List[_]] -abstract trait Cov13[+A <: AnyRef,-B <: List[_]] extends Object +abstract trait Cov13[+A <: AnyRef, -B <: List[_]] extends Object extest.Cov13[_ <: AnyRef, _ <: List[_]] -abstract trait Cov14[A <: AnyRef,+B <: List[_]] extends Object +abstract trait Cov14[A <: AnyRef, +B <: List[_]] extends Object extest.Cov14[_ <: AnyRef, _ <: List[_]] -abstract trait Cov15[A <: AnyRef,B <: List[_]] extends Object +abstract trait Cov15[A <: AnyRef, B <: List[_]] extends Object extest.Cov15[_ <: AnyRef, _ <: List[_]] -abstract trait Cov16[A <: AnyRef,-B <: List[_]] extends Object +abstract trait Cov16[A <: AnyRef, -B <: List[_]] extends Object extest.Cov16[_ <: AnyRef, _ <: List[_]] -abstract trait Cov17[-A <: AnyRef,+B <: List[_]] extends Object +abstract trait Cov17[-A <: AnyRef, +B <: List[_]] extends Object extest.Cov17[_ <: AnyRef, _ <: List[_]] -abstract trait Cov18[-A <: AnyRef,B <: List[_]] extends Object +abstract trait Cov18[-A <: AnyRef, B <: List[_]] extends Object extest.Cov18[_ <: AnyRef, _ <: List[_]] -abstract trait Cov19[-A <: AnyRef,-B <: List[_]] extends Object +abstract trait Cov19[-A <: AnyRef, -B <: List[_]] extends Object extest.Cov19[_ <: AnyRef, _ <: List[_]] -abstract trait Cov21[+A,+B] extends Object +abstract trait Cov21[+A, +B] extends Object extest.Cov21[_, _] -abstract trait Cov22[+A,B] extends Object +abstract trait Cov22[+A, B] extends Object extest.Cov22[_, _] -abstract trait Cov23[+A,-B] extends Object +abstract trait Cov23[+A, -B] extends Object extest.Cov23[_, _] -abstract trait Cov24[A,+B] extends Object +abstract trait Cov24[A, +B] extends Object extest.Cov24[_, _] -abstract trait Cov25[A,B] extends Object +abstract trait Cov25[A, B] extends Object extest.Cov25[_, _] -abstract trait Cov26[A,-B] extends Object +abstract trait Cov26[A, -B] extends Object extest.Cov26[_, _] -abstract trait Cov27[-A,+B] extends Object +abstract trait Cov27[-A, +B] extends Object extest.Cov27[_, _] -abstract trait Cov28[-A,B] extends Object +abstract trait Cov28[-A, B] extends Object extest.Cov28[_, _] -abstract trait Cov29[-A,-B] extends Object +abstract trait Cov29[-A, -B] extends Object extest.Cov29[_, _] -abstract trait Cov31[+A,+B,C <: (A, B)] extends Object +abstract trait Cov31[+A, +B, C <: (A, B)] extends Object extest.Cov31[A,B,C] forSome { +A; +B; C <: (A, B) } -abstract trait Cov32[+A,B,C <: (A, B)] extends Object +abstract trait Cov32[+A, B, C <: (A, B)] extends Object extest.Cov32[A,B,C] forSome { +A; B; C <: (A, B) } -abstract trait Cov33[+A,-B,C <: (A, _$10) forSome { type _$10 }] extends Object +abstract trait Cov33[+A, -B, C <: (A, _$10) forSome { type _$10 }] extends Object extest.Cov33[A,B,C] forSome { +A; -B; C <: (A, _$10) forSome { type _$10 } } -abstract trait Cov34[A,+B,C <: (A, 
B)] extends Object +abstract trait Cov34[A, +B, C <: (A, B)] extends Object extest.Cov34[A,B,C] forSome { A; +B; C <: (A, B) } -abstract trait Cov35[A,B,C <: (A, B)] extends Object +abstract trait Cov35[A, B, C <: (A, B)] extends Object extest.Cov35[A,B,C] forSome { A; B; C <: (A, B) } -abstract trait Cov36[A,-B,C <: (A, _$11) forSome { type _$11 }] extends Object +abstract trait Cov36[A, -B, C <: (A, _$11) forSome { type _$11 }] extends Object extest.Cov36[A,B,C] forSome { A; -B; C <: (A, _$11) forSome { type _$11 } } -abstract trait Cov37[-A,+B,C <: (_$12, B) forSome { type _$12 }] extends Object +abstract trait Cov37[-A, +B, C <: (_$12, B) forSome { type _$12 }] extends Object extest.Cov37[A,B,C] forSome { -A; +B; C <: (_$12, B) forSome { type _$12 } } -abstract trait Cov38[-A,B,C <: (_$13, B) forSome { type _$13 }] extends Object +abstract trait Cov38[-A, B, C <: (_$13, B) forSome { type _$13 }] extends Object extest.Cov38[A,B,C] forSome { -A; B; C <: (_$13, B) forSome { type _$13 } } -abstract trait Cov39[-A,-B,C <: Tuple2[_, _]] extends Object +abstract trait Cov39[-A, -B, C <: Tuple2[_, _]] extends Object extest.Cov39[_, _, _ <: Tuple2[_, _]] -abstract trait Cov41[+A >: Null,+B] extends Object +abstract trait Cov41[+A >: Null, +B] extends Object extest.Cov41[_ >: Null, _] -abstract trait Cov42[+A >: Null,B] extends Object +abstract trait Cov42[+A >: Null, B] extends Object extest.Cov42[_ >: Null, _] -abstract trait Cov43[+A >: Null,-B] extends Object +abstract trait Cov43[+A >: Null, -B] extends Object extest.Cov43[_ >: Null, _] -abstract trait Cov44[A >: Null,+B] extends Object +abstract trait Cov44[A >: Null, +B] extends Object extest.Cov44[_ >: Null, _] -abstract trait Cov45[A >: Null,B] extends Object +abstract trait Cov45[A >: Null, B] extends Object extest.Cov45[_ >: Null, _] -abstract trait Cov46[A >: Null,-B] extends Object +abstract trait Cov46[A >: Null, -B] extends Object extest.Cov46[_ >: Null, _] -abstract trait Cov47[-A >: Null,+B] extends Object +abstract trait Cov47[-A >: Null, +B] extends Object extest.Cov47[_ >: Null, _] -abstract trait Cov48[-A >: Null,B] extends Object +abstract trait Cov48[-A >: Null, B] extends Object extest.Cov48[_ >: Null, _] -abstract trait Cov49[-A >: Null,-B] extends Object +abstract trait Cov49[-A >: Null, -B] extends Object extest.Cov49[_ >: Null, _] -abstract trait Covariant[+A <: AnyRef,+B] extends Object +abstract trait Covariant[+A <: AnyRef, +B] extends Object extest.Covariant[_ <: AnyRef, _] -abstract trait CovariantLike[+A <: AnyRef,+B <: List[A],+This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object +abstract trait CovariantLike[+A <: AnyRef, +B <: List[A], +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B]] extends Object extest.CovariantLike[A,B,This] forSome { +A <: AnyRef; +B <: List[A]; +This <: extest.CovariantLike[A,B,This] with extest.Covariant[A,B] } diff --git a/test/files/run/reify_ann1a.check b/test/files/run/reify_ann1a.check index 97d4848a49..66dce778a8 100644 --- a/test/files/run/reify_ann1a.check +++ b/test/files/run/reify_ann1a.check @@ -1,5 +1,5 @@ { - @new ann(immutable.this.List.apply[String]("1a")) @new ann(immutable.this.List.apply[String]("1b")) class C[@new ann(immutable.this.List.apply[String]("2a")) @new ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends scala.AnyRef { + @new ann(immutable.this.List.apply[String]("1a")) @new ann(immutable.this.List.apply[String]("1b")) class C[@new ann(immutable.this.List.apply[String]("2a")) @new 
ann(immutable.this.List.apply[String]("2b")) T] extends scala.AnyRef { @new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) private[this] val x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b")) = _; def (@new ann(immutable.this.List.apply[String]("3a")) @new ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4a")) @ann(immutable.this.List.apply[String]("4b"))) = { super.(); @@ -14,7 +14,7 @@ () } { - @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T>: Nothing <: Any] extends scala.AnyRef { + @ann(immutable.this.List.apply[String]("1a")) @ann(immutable.this.List.apply[String]("1b")) class C[@ann(immutable.this.List.apply[String]("2a")) @ann(immutable.this.List.apply[String]("2b")) T] extends scala.AnyRef { @ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) private[this] val x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a")) = _; def (@ann(immutable.this.List.apply[String]("3a")) @ann(immutable.this.List.apply[String]("3b")) x: T @ann(immutable.this.List.apply[String]("4b")) @ann(immutable.this.List.apply[String]("4a"))): C[T] = { C.super.(); diff --git a/test/files/run/reify_ann1b.check b/test/files/run/reify_ann1b.check index ceebc0e2ed..9bc65a422e 100644 --- a/test/files/run/reify_ann1b.check +++ b/test/files/run/reify_ann1b.check @@ -1,5 +1,5 @@ { - @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T>: Nothing <: Any] extends scala.AnyRef { + @new ann(bar = "1a") @new ann(bar = "1b") class C[@new ann(bar = "2a") @new ann(bar = "2b") T] extends scala.AnyRef { @new ann(bar = "3a") @new ann(bar = "3b") private[this] val x: T @ann(bar = "4a") @ann(bar = "4b") = _; def (@new ann(bar = "3a") @new ann(bar = "3b") x: T @ann(bar = "4a") @ann(bar = "4b")) = { super.(); @@ -14,7 +14,7 @@ () } { - @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T>: Nothing <: Any] extends scala.AnyRef { + @ann(bar = "1a") @ann(bar = "1b") class C[@ann(bar = "2a") @ann(bar = "2b") T] extends scala.AnyRef { @ann(bar = "3a") @ann(bar = "3b") private[this] val x: T @ann(bar = "4b") @ann(bar = "4a") = _; def (@ann(bar = "3a") @ann(bar = "3b") x: T @ann(bar = "4b") @ann(bar = "4a")): C[T] = { C.super.(); -- cgit v1.2.3 From 1c65152c7aaeb3aeaf8a5e39e6fd51e5b4b95836 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 20 Mar 2012 19:56:56 -0700 Subject: Fix for stability failure. Pattern matcher! Totally unrelated to irrefutability, the pattern matcher at some point stopped sorting its lookup switch cases, and the butterfly's wings flapped enough to swap two cases. Now they're sorted in ascending order like they're supposed to be. 
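For illustration, a sketch of the kind of match this touches (names here are made up; the real check is the new test/files/pos/lookupswitch.scala below). Each literal key becomes one entry of the emitted lookup switch, and those entries are supposed to appear in increasing key order, so the cases now come out as 2, 6, 7, 18 no matter how they are written in source:

    class Switcher {                      // illustrative only
      def describe(x: Int): String = x match {
        case 6  => "6"
        case 18 => "18"
        case 7  => "7"
        case 2  => "2"
      }
    }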
--- .../tools/nsc/matching/ParallelMatching.scala | 2 +- test/files/pos/lookupswitch.scala | 37 ++++++++++++++++++++++ 2 files changed, 38 insertions(+), 1 deletion(-) create mode 100644 test/files/pos/lookupswitch.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index 1285e29d4a..be5a9907b8 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -309,7 +309,7 @@ trait ParallelMatching extends ast.TreeDSL } lazy val cases = - for ((tag, indices) <- literalMap.toList) yield { + for ((tag, indices) <- literalMap.toList.sortBy(_._1)) yield { val newRows = indices map (i => addDefaultVars(i)(rest rows i)) val r = remake(newRows ++ defaultRows, includeScrut = false) val r2 = make(r.tvars, r.rows map (x => x rebind bindVars(tag, x.subst))) diff --git a/test/files/pos/lookupswitch.scala b/test/files/pos/lookupswitch.scala new file mode 100644 index 0000000000..33594c0ea6 --- /dev/null +++ b/test/files/pos/lookupswitch.scala @@ -0,0 +1,37 @@ +// There's not a real test here, but on compilation the +// switch should have the cases arranged in order from 1-30. +class A { + def f(x: Int) = x match { + case 6 => "6" + case 18 => "18" + case 7 => "7" + case 2 => "2" + case 13 => "13" + case 11 => "11" + case 26 => "26" + case 27 => "27" + case 29 => "29" + case 25 => "25" + case 9 => "9" + case 17 => "17" + case 16 => "16" + case 1 => "1" + case 30 => "30" + case 15 => "15" + case 22 => "22" + case 19 => "19" + case 23 => "23" + case 8 => "8" + case 28 => "28" + case 5 => "5" + case 12 => "12" + case 10 => "10" + case 21 => "21" + case 24 => "24" + case 4 => "4" + case 14 => "14" + case 3 => "3" + case 20 => "20" + } +} + \ No newline at end of file -- cgit v1.2.3 From fcc63e2b667bdbaddbaacc843de97f8db02f6426 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 20 Mar 2012 20:15:45 -0700 Subject: Restore irrefutability commits. This reverts commit d8ba5d091e5641553b438ef9930a6023a2709dcd. 
--- src/compiler/scala/reflect/internal/TreeInfo.scala | 20 +++++++++++- .../scala/tools/nsc/ast/parser/TreeBuilder.scala | 34 +++++++++----------- .../scala/tools/nsc/typechecker/RefChecks.scala | 2 +- test/files/neg/t5589neg.check | 37 ++++++++++++++++++++++ test/files/neg/t5589neg.scala | 6 ++++ test/files/neg/t5589neg2.check | 9 ++++++ test/files/neg/t5589neg2.scala | 13 ++++++++ test/files/pos/irrefutable.scala | 22 +++++++++++++ test/files/pos/t1336.scala | 10 ++++++ test/files/pos/t5589.scala | 22 +++++++++++++ test/files/run/t4574.check | 2 ++ test/files/run/t4574.scala | 13 ++++++++ 12 files changed, 169 insertions(+), 21 deletions(-) create mode 100644 test/files/neg/t5589neg.check create mode 100644 test/files/neg/t5589neg.scala create mode 100644 test/files/neg/t5589neg2.check create mode 100644 test/files/neg/t5589neg2.scala create mode 100644 test/files/pos/irrefutable.scala create mode 100644 test/files/pos/t1336.scala create mode 100644 test/files/pos/t5589.scala create mode 100644 test/files/run/t4574.check create mode 100644 test/files/run/t4574.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/TreeInfo.scala b/src/compiler/scala/reflect/internal/TreeInfo.scala index 769d7a9ed1..ce3de94335 100644 --- a/src/compiler/scala/reflect/internal/TreeInfo.scala +++ b/src/compiler/scala/reflect/internal/TreeInfo.scala @@ -17,7 +17,7 @@ abstract class TreeInfo { val global: SymbolTable import global._ - import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass } + import definitions.{ isVarArgsList, isCastSymbol, ThrowableClass, TupleClass } /* Does not seem to be used. Not sure what it does anyway. def isOwnerDefinition(tree: Tree): Boolean = tree match { @@ -312,6 +312,24 @@ abstract class TreeInfo { case _ => false } + /** Is this tree comprised of nothing but identifiers, + * but possibly in bindings or tuples? For instance + * + * foo @ (bar, (baz, quux)) + * + * is a variable pattern; if the structure matches, + * then the remainder is inevitable. + */ + def isVariablePattern(tree: Tree): Boolean = tree match { + case Bind(name, pat) => isVariablePattern(pat) + case Ident(name) => true + case Apply(sel, args) => + ( isReferenceToScalaMember(sel, TupleClass(args.size).name.toTermName) + && (args forall isVariablePattern) + ) + case _ => false + } + /** Is this argument node of the form : _* ? 
*/ def isWildcardStarArg(tree: Tree): Boolean = tree match { diff --git a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala index 0d2fbc5372..0bc88d1efd 100644 --- a/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala +++ b/src/compiler/scala/tools/nsc/ast/parser/TreeBuilder.scala @@ -262,29 +262,25 @@ abstract class TreeBuilder { else if (stats.length == 1) stats.head else Block(stats.init, stats.last) + def makeFilter(tree: Tree, condition: Tree, scrutineeName: String): Tree = { + val cases = List( + CaseDef(condition, EmptyTree, Literal(Constant(true))), + CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false))) + ) + val matchTree = makeVisitor(cases, false, scrutineeName) + + atPos(tree.pos)(Apply(Select(tree, nme.withFilter), matchTree :: Nil)) + } + /** Create tree for for-comprehension generator */ def makeGenerator(pos: Position, pat: Tree, valeq: Boolean, rhs: Tree): Enumerator = { val pat1 = patvarTransformer.transform(pat) val rhs1 = - if (valeq) rhs - else matchVarPattern(pat1) match { - case Some(_) => - rhs - case None => - atPos(rhs.pos) { - Apply( - Select(rhs, nme.filter), - List( - makeVisitor( - List( - CaseDef(pat1.duplicate, EmptyTree, Literal(Constant(true))), - CaseDef(Ident(nme.WILDCARD), EmptyTree, Literal(Constant(false)))), - false, - nme.CHECK_IF_REFUTABLE_STRING - ))) - } - } - if (valeq) ValEq(pos, pat1, rhs1) else ValFrom(pos, pat1, rhs1) + if (valeq || treeInfo.isVariablePattern(pat)) rhs + else makeFilter(rhs, pat1.duplicate, nme.CHECK_IF_REFUTABLE_STRING) + + if (valeq) ValEq(pos, pat1, rhs1) + else ValFrom(pos, pat1, rhs1) } def makeParam(pname: TermName, tpe: Tree) = diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala index fa19f380fd..d98d248231 100644 --- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala +++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala @@ -1445,7 +1445,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R private def transformApply(tree: Apply): Tree = tree match { case Apply( - Select(qual, nme.filter), + Select(qual, nme.filter | nme.withFilter), List(Function( List(ValDef(_, pname, tpt, _)), Match(_, CaseDef(pat1, _, _) :: _)))) diff --git a/test/files/neg/t5589neg.check b/test/files/neg/t5589neg.check new file mode 100644 index 0000000000..b3ff16d7e4 --- /dev/null +++ b/test/files/neg/t5589neg.check @@ -0,0 +1,37 @@ +t5589neg.scala:2: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:2: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:3: warning: `withFilter' method does not yet exist on Either.RightProjection[Int,String], using `filter' method instead + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:3: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:4: error: constructor cannot be instantiated to expected type; + found : (T1,) + required: (String, Int) + def f7(x: Either[Int, (String, Int)]) = 
for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:4: error: not found: value y2 + def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:5: error: constructor cannot be instantiated to expected type; + found : (T1, T2, T3) + required: (String, Int) + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:5: error: not found: value y1 + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +t5589neg.scala:5: error: not found: value y2 + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) + ^ +two warnings found +7 errors found diff --git a/test/files/neg/t5589neg.scala b/test/files/neg/t5589neg.scala new file mode 100644 index 0000000000..31ff2c3693 --- /dev/null +++ b/test/files/neg/t5589neg.scala @@ -0,0 +1,6 @@ +class A { + def f5(x: Either[Int, String]) = for ((y1, y2: String) <- x.right) yield ((y1, y2)) + def f6(x: Either[Int, String]) = for ((y1, y2: Any) <- x.right) yield ((y1, y2)) + def f7(x: Either[Int, (String, Int)]) = for (y1 @ Tuple1(y2) <- x.right) yield ((y1, y2)) + def f8(x: Either[Int, (String, Int)]) = for ((y1, y2, y3) <- x.right) yield ((y1, y2)) +} diff --git a/test/files/neg/t5589neg2.check b/test/files/neg/t5589neg2.check new file mode 100644 index 0000000000..6af4955a83 --- /dev/null +++ b/test/files/neg/t5589neg2.check @@ -0,0 +1,9 @@ +t5589neg2.scala:7: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: String + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + ^ +t5589neg2.scala:7: error: not found: value d + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + ^ +two errors found diff --git a/test/files/neg/t5589neg2.scala b/test/files/neg/t5589neg2.scala new file mode 100644 index 0000000000..b7c7ab7218 --- /dev/null +++ b/test/files/neg/t5589neg2.scala @@ -0,0 +1,13 @@ +class A { + def f1(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, (c, d))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // ok + } + + def f2(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, (c, (d1, d2)))), es), fs), gs) <- x) yield (d :: es).mkString(", ") // not ok + } + + def f3(x: List[((((Int, (Double, (Float, String))), List[String]), List[Int]), List[Float])]) = { + for (((((a, (b, _)), es), fs), gs) <- x) yield (es ::: fs).mkString(", ") // ok + } +} \ No newline at end of file diff --git a/test/files/pos/irrefutable.scala b/test/files/pos/irrefutable.scala new file mode 100644 index 0000000000..0a792b644a --- /dev/null +++ b/test/files/pos/irrefutable.scala @@ -0,0 +1,22 @@ +// The test which this should perform but does not +// is that f1 is recognized as irrefutable and f2 is not +// This can be recognized via the generated classes: +// +// A$$anonfun$f1$1.class +// A$$anonfun$f2$1.class +// A$$anonfun$f2$2.class +// +// The extra one in $f2$ is the filter. +// +// !!! Marking with exclamation points so maybe someday +// this test will be finished. 
+class A { + case class Foo[T](x: T) + + def f1(xs: List[Foo[Int]]) = { + for (Foo(x: Int) <- xs) yield x + } + def f2(xs: List[Foo[Any]]) = { + for (Foo(x: Int) <- xs) yield x + } +} diff --git a/test/files/pos/t1336.scala b/test/files/pos/t1336.scala new file mode 100644 index 0000000000..63967985c7 --- /dev/null +++ b/test/files/pos/t1336.scala @@ -0,0 +1,10 @@ +object Foo { + def foreach( f : ((Int,Int)) => Unit ) { + println("foreach") + f(1,2) + } + + for( (a,b) <- this ) { + println((a,b)) + } +} diff --git a/test/files/pos/t5589.scala b/test/files/pos/t5589.scala new file mode 100644 index 0000000000..69cbb20391 --- /dev/null +++ b/test/files/pos/t5589.scala @@ -0,0 +1,22 @@ +class A { + // First three compile. + def f1(x: Either[Int, String]) = x.right map (y => y) + def f2(x: Either[Int, String]) = for (y <- x.right) yield y + def f3(x: Either[Int, (String, Int)]) = x.right map { case (y1, y2) => (y1, y2) } + // Last one fails. + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) +/** +./a.scala:5: error: constructor cannot be instantiated to expected type; + found : (T1, T2) + required: Either[Nothing,(String, Int)] + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y1 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +./a.scala:5: error: not found: value y2 + def f4(x: Either[Int, (String, Int)]) = for ((y1, y2) <- x.right) yield ((y1, y2)) + ^ +three errors found +**/ +} diff --git a/test/files/run/t4574.check b/test/files/run/t4574.check new file mode 100644 index 0000000000..a4522fff24 --- /dev/null +++ b/test/files/run/t4574.check @@ -0,0 +1,2 @@ +I hereby refute null! +I denounce null as unListLike! diff --git a/test/files/run/t4574.scala b/test/files/run/t4574.scala new file mode 100644 index 0000000000..1dde496aca --- /dev/null +++ b/test/files/run/t4574.scala @@ -0,0 +1,13 @@ +object Test { + val xs: List[(Int, Int)] = List((2, 2), null) + + def expectMatchError[T](msg: String)(body: => T) { + try { body ; assert(false, "Should not succeed.") } + catch { case _: MatchError => println(msg) } + } + + def main(args: Array[String]): Unit = { + expectMatchError("I hereby refute null!")( for ((x, y) <- xs) yield x ) + expectMatchError("I denounce null as unListLike!")( (null: Any) match { case List(_*) => true } ) + } +} -- cgit v1.2.3 From 437c626113711ebb5290c129611ee7f4b0c787f5 Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 21 Mar 2012 14:54:18 +0100 Subject: Allows now private primary constructors in value classes. --- src/compiler/scala/reflect/internal/Symbols.scala | 3 +- .../tools/nsc/transform/ExtensionMethods.scala | 4 ++ .../scala/tools/nsc/typechecker/TreeCheckers.scala | 4 +- .../scala/tools/nsc/typechecker/Typers.scala | 2 - test/files/run/valueclasses-constr.scala | 84 ++++++++++++++++++---- 5 files changed, 77 insertions(+), 20 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 2ba45c5972..f4039cf6d3 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -1872,7 +1872,8 @@ trait Symbols extends api.Symbols { self: SymbolTable => def unpackLocation: AnyRef = null /** Remove private modifier from symbol `sym`s definition. 
If `sym` is a - * term symbol rename it by expanding its name to avoid name clashes + * is not a constructor nor a static module rename it by expanding its name to avoid name clashes + * @param base the fully qualified name of this class will be appended if name expansion is needed */ final def makeNotPrivate(base: Symbol) { if (this.isPrivate) { diff --git a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala index 4c3972519a..5104518dd9 100644 --- a/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala +++ b/src/compiler/scala/tools/nsc/transform/ExtensionMethods.scala @@ -28,6 +28,9 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { /** the following two members override abstract members in Transform */ val phaseName: String = "extmethods" + /** The following flags may be set by this phase: */ + override def phaseNewFlags: Long = notPRIVATE + def newTransformer(unit: CompilationUnit): Transformer = new Extender(unit) @@ -101,6 +104,7 @@ abstract class ExtensionMethods extends Transform with TypingTransformers { case Template(_, _, _) => if (currentOwner.isDerivedValueClass) { extensionDefs(currentOwner.companionModule) = new mutable.ListBuffer[Tree] + currentOwner.primaryConstructor.makeNotPrivate(NoSymbol) super.transform(tree) } else if (currentOwner.isStaticOwner) { super.transform(tree) diff --git a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala index ed263cbbef..105c2c0b98 100644 --- a/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/TreeCheckers.scala @@ -263,8 +263,8 @@ abstract class TreeCheckers extends Analyzer { tree match { case x: PackageDef => - if (sym.ownerChain contains currentOwner) () - else fail(sym + " owner chain does not contain currentOwner " + currentOwner) + if ((sym.ownerChain contains currentOwner) || currentOwner == definitions.EmptyPackageClass) () + else fail(sym + " owner chain does not contain currentOwner " + currentOwner + sym.ownerChain) case _ => def cond(s: Symbol) = !s.isTerm || s.isMethod || s == sym.owner diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index ad48712a32..893941984f 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1284,8 +1284,6 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { unit.error(clazz.pos, "value class may not be a "+ (if (clazz.owner.isTerm) "local class" else "member of another class")) val constr = clazz.primaryConstructor - if ((constr hasFlag (PRIVATE | PROTECTED)) || constr.privateWithin != NoSymbol) - unit.error(constr.pos, "value class must have public primary constructor") clazz.info.decls.toList.filter(acc => acc.isMethod && (acc hasFlag PARAMACCESSOR)) match { case List(acc) => def isUnderlyingAcc(sym: Symbol) = diff --git a/test/files/run/valueclasses-constr.scala b/test/files/run/valueclasses-constr.scala index 7a10299386..652d8d8d22 100644 --- a/test/files/run/valueclasses-constr.scala +++ b/test/files/run/valueclasses-constr.scala @@ -1,25 +1,79 @@ -object TOD { - final val SecondsPerDay = 86400 +package test1 { + object TOD { + final val SecondsPerDay = 86400 - def apply(seconds: Int) = { - val n = seconds % SecondsPerDay - new TOD(if (n >= 0) n else n + SecondsPerDay) - } + def 
apply(seconds: Int) = { + val n = seconds % SecondsPerDay + new TOD(if (n >= 0) n else n + SecondsPerDay) + } + } + + final class TOD (val secondsOfDay: Int) extends AnyVal { + def hours = secondsOfDay / 3600 + def minutes = (secondsOfDay / 60) % 60 + def seconds = secondsOfDay % 60 + + override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds) + } } +package test2 { + object TOD { + final val SecondsPerDay = 86400 + + def apply(seconds: Int) = { + val n = seconds % SecondsPerDay + new TOD(if (n >= 0) n else n + SecondsPerDay) + } + } + + final class TOD private[test2] (val secondsOfDay: Int) extends AnyVal { + def hours = secondsOfDay / 3600 + def minutes = (secondsOfDay / 60) % 60 + def seconds = secondsOfDay % 60 + + override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds) + } + + object Client { + def newTOD(x: Int) = new TOD(x) + } +} + +package test3 { + object TOD { + final val SecondsPerDay = 86400 + + def apply(seconds: Int) = { + val n = seconds % SecondsPerDay + new TOD(if (n >= 0) n else n + SecondsPerDay) + } + } -final class TOD (val secondsOfDay: Int) extends AnyVal { - def hours = secondsOfDay / 3600 - def minutes = (secondsOfDay / 60) % 60 - def seconds = secondsOfDay % 60 + final class TOD private (val secondsOfDay: Int) extends AnyVal { + def hours = secondsOfDay / 3600 + def minutes = (secondsOfDay / 60) % 60 + def seconds = secondsOfDay % 60 - override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds) + override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds) + } } object Test extends App { - val y: TOD = new TOD(1000) - val x: TOD = TOD(1000) - println(x.hours) - println(x) + val y1: test1.TOD = new test1.TOD(1000) + val y2: test2.TOD = test2.Client.newTOD(1000) + val x1: test1.TOD = test1.TOD(1000) + val x2: test2.TOD = test2.TOD(1000) + val x3: test3.TOD = test3.TOD(1000) + println(y1.minutes) + println(y1) + println(y2.minutes) + println(y2) + println(x1.minutes) + println(x1) + println(x2.minutes) + println(x2) + println(x3.minutes) + println(x3) } -- cgit v1.2.3 From bed3304bf86d88e372309063bd247e4cd9171a6f Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 21 Mar 2012 16:43:47 +0100 Subject: Fixed SI-5063. 
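The added `!qual.isInstanceOf[Super]` check keeps a `super` qualifier from being adapted with an implicit view, so the new neg test below reduces to the following and is now rejected with "value + is not a member of Object":

    class A {
      super.+("")   // error: value + is not a member of Object
    }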
--- src/compiler/scala/tools/nsc/typechecker/Typers.scala | 8 ++------ test/files/neg/t5063.check | 4 ++++ test/files/neg/t5063.scala | 3 +++ 3 files changed, 9 insertions(+), 6 deletions(-) create mode 100644 test/files/neg/t5063.check create mode 100644 test/files/neg/t5063.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala index 893941984f..49ce9712df 100644 --- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala +++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala @@ -1158,6 +1158,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { val qtpe = qual.tpe.widen ( !isPastTyper && qual.isTerm + && !qual.isInstanceOf[Super] && ((qual.symbol eq null) || !qual.symbol.isTerm || qual.symbol.isValue) && !qtpe.isError && !qtpe.typeSymbol.isBottomClass @@ -1173,12 +1174,7 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser { ) } - def adaptToMember(qual: Tree, searchTemplate: Type): Tree = - adaptToMember(qual, searchTemplate, true, true) - def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean): Tree = - adaptToMember(qual, searchTemplate, reportAmbiguous, true) - - def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean, saveErrors: Boolean): Tree = { + def adaptToMember(qual: Tree, searchTemplate: Type, reportAmbiguous: Boolean = true, saveErrors: Boolean = true): Tree = { if (isAdaptableWithView(qual)) { qual.tpe.widen.normalize match { case et: ExistentialType => diff --git a/test/files/neg/t5063.check b/test/files/neg/t5063.check new file mode 100644 index 0000000000..84690d0a1d --- /dev/null +++ b/test/files/neg/t5063.check @@ -0,0 +1,4 @@ +t5063.scala:2: error: value + is not a member of Object + super.+("") + ^ +one error found diff --git a/test/files/neg/t5063.scala b/test/files/neg/t5063.scala new file mode 100644 index 0000000000..5b34b53fb7 --- /dev/null +++ b/test/files/neg/t5063.scala @@ -0,0 +1,3 @@ +class A { + super.+("") +} -- cgit v1.2.3 From bd8bff3370a124330ee51747826dba855497487c Mon Sep 17 00:00:00 2001 From: Martin Odersky Date: Wed, 21 Mar 2012 18:23:28 +0100 Subject: Fixed test cases. 
--- test/files/neg/valueclasses.check | 5 +---- test/files/neg/valueclasses.scala | 23 ----------------------- test/files/run/valueclasses-constr.check | 10 +++++++++- 3 files changed, 10 insertions(+), 28 deletions(-) (limited to 'test/files') diff --git a/test/files/neg/valueclasses.check b/test/files/neg/valueclasses.check index 756a0474fa..4f042faded 100644 --- a/test/files/neg/valueclasses.check +++ b/test/files/neg/valueclasses.check @@ -40,7 +40,4 @@ class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail valueclasses.scala:31: error: value class needs to have exactly one public val parameter class V13(x: Int) extends AnyVal // fail ^ -valueclasses.scala:45: error: value class must have public primary constructor -final class TOD private (val secondsOfDay: Int) extends AnyVal { // should fail with private constructor - ^ -15 errors found +14 errors found diff --git a/test/files/neg/valueclasses.scala b/test/files/neg/valueclasses.scala index e405d95489..7cac94ab11 100644 --- a/test/files/neg/valueclasses.scala +++ b/test/files/neg/valueclasses.scala @@ -29,26 +29,3 @@ class V11[T](val x: List[T]) extends AnyVal // ok class V12[@specialized T, U](val x: (T, U)) extends AnyVal // fail class V13(x: Int) extends AnyVal // fail - - -package time { - -object TOD { - final val SecondsPerDay = 86400 - - def apply(seconds: Int) = { - val n = seconds % SecondsPerDay - new TOD(if (n >= 0) n else n + SecondsPerDay) - } -} - -final class TOD private (val secondsOfDay: Int) extends AnyVal { // should fail with private constructor - def hours = secondsOfDay / 3600 - def minutes = (secondsOfDay / 60) % 60 - def seconds = secondsOfDay % 60 - - override def toString = "%02d:%02d:%02d".format(hours, minutes, seconds) -} -} - - diff --git a/test/files/run/valueclasses-constr.check b/test/files/run/valueclasses-constr.check index df37fbc723..785e6fa25b 100644 --- a/test/files/run/valueclasses-constr.check +++ b/test/files/run/valueclasses-constr.check @@ -1,2 +1,10 @@ -0 +16 +00:16:40 +16 +00:16:40 +16 +00:16:40 +16 +00:16:40 +16 00:16:40 -- cgit v1.2.3 From 5d555ef90f443e20d2e46c668e456df0a643dae8 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 17 Mar 2012 21:12:51 -0700 Subject: Overhaul of JavaConver{sions,ters}. Initially motivated by SI-5580, then just motivated. I broke up the opaquely named JavaConversions and JavaConverters into the following traits encapsulating some permutation of { to java, to scala, bidirectional } { wrappers, decorators } I named everything consistently in terms of either Wrappers or Decorators. Decorators install those asJava/asScala methods onto collections of the right kind; Wrappers hide the process. JavaConversions then reduces to an object which (ill-advisedly) extends both WrapAsJava and WrapAsScala. And JavaConverters is an object extending DecorateAsScala and DecorateAsJava. However other more clearly named vals exist in the newly created scala.collection.convert package object. 
val decorateAsJava = new DecorateAsJava { } val decorateAsScala = new DecorateAsScala { } val decorateAll = new DecorateAsJava with DecorateAsScala { } val wrapAsJava = new WrapAsJava { } val wrapAsScala = new WrapAsScala { } val wrapAll = new WrapAsJava with WrapAsScala { } So for instance to import asScala decorators, and only those: scala> import scala.collection.convert.decorateAsScala._ import scala.collection.convert.decorateAsScala._ scala> new java.util.ArrayList[String].asScala groupBy (x => x) res0: scala.collection.immutable.Map[String,scala.collection.mutable.Buffer[String]] = Map() I propose we put those vals or a subset of them in the scala package object rather than way down in scala.collection.convert. --- src/library/scala/collection/JavaConversions.scala | 883 ++------------------- src/library/scala/collection/JavaConverters.scala | 484 +---------- .../scala/collection/convert/DecorateAsJava.scala | 296 +++++++ .../scala/collection/convert/DecorateAsScala.scala | 189 +++++ .../scala/collection/convert/Decorators.scala | 46 ++ .../scala/collection/convert/WrapAsJava.scala | 256 ++++++ .../scala/collection/convert/WrapAsScala.scala | 193 +++++ .../scala/collection/convert/Wrappers.scala | 422 ++++++++++ src/library/scala/collection/convert/package.scala | 18 + .../scala/collection/mutable/WeakHashMap.scala | 5 +- test/files/neg/saferJavaConversions.check | 6 + test/files/neg/saferJavaConversions.scala | 20 + test/files/neg/t5580a.check | 6 + test/files/neg/t5580a.scala | 11 + test/files/pos/t5580b.scala | 19 + 15 files changed, 1551 insertions(+), 1303 deletions(-) create mode 100644 src/library/scala/collection/convert/DecorateAsJava.scala create mode 100644 src/library/scala/collection/convert/DecorateAsScala.scala create mode 100644 src/library/scala/collection/convert/Decorators.scala create mode 100644 src/library/scala/collection/convert/WrapAsJava.scala create mode 100644 src/library/scala/collection/convert/WrapAsScala.scala create mode 100644 src/library/scala/collection/convert/Wrappers.scala create mode 100644 src/library/scala/collection/convert/package.scala create mode 100644 test/files/neg/saferJavaConversions.check create mode 100644 test/files/neg/saferJavaConversions.scala create mode 100644 test/files/neg/t5580a.check create mode 100644 test/files/neg/t5580a.scala create mode 100644 test/files/pos/t5580b.scala (limited to 'test/files') diff --git a/src/library/scala/collection/JavaConversions.scala b/src/library/scala/collection/JavaConversions.scala index 50919e506a..75ab3f28f5 100644 --- a/src/library/scala/collection/JavaConversions.scala +++ b/src/library/scala/collection/JavaConversions.scala @@ -8,6 +8,9 @@ package scala.collection +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import convert._ + /** A collection of implicit conversions supporting interoperability between * Scala and Java collections. 
* @@ -46,877 +49,81 @@ package scala.collection * @author Martin Odersky * @since 2.8 */ -object JavaConversions { +object JavaConversions extends WrapAsScala with WrapAsJava { + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ConcurrentMapWrapper[A, B] = Wrappers.ConcurrentMapWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type DictionaryWrapper[A, B] = Wrappers.DictionaryWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IterableWrapper[A] = Wrappers.IterableWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type IteratorWrapper[A] = Wrappers.IteratorWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JCollectionWrapper[A] = Wrappers.JCollectionWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JConcurrentMapWrapper[A, B] = Wrappers.JConcurrentMapWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JDictionaryWrapper[A, B] = Wrappers.JDictionaryWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JEnumerationWrapper[A] = Wrappers.JEnumerationWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIterableWrapper[A] = Wrappers.JIterableWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JIteratorWrapper[A] = Wrappers.JIteratorWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JListWrapper[A] = Wrappers.JListWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JMapWrapper[A, B] = Wrappers.JMapWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JPropertiesWrapper = Wrappers.JPropertiesWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type JSetWrapper[A] = Wrappers.JSetWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MapWrapper[A, B] = Wrappers.MapWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableBufferWrapper[A] = Wrappers.MutableBufferWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableMapWrapper[A, B] = Wrappers.MutableMapWrapper[A, B] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSeqWrapper[A] = Wrappers.MutableSeqWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type MutableSetWrapper[A] = Wrappers.MutableSetWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SeqWrapper[A] = Wrappers.SeqWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type SetWrapper[A] = Wrappers.SetWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") type ToIteratorWrapper[A] = Wrappers.ToIteratorWrapper[A] + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val DictionaryWrapper = Wrappers.DictionaryWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IterableWrapper = Wrappers.IterableWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val IteratorWrapper = Wrappers.IteratorWrapper + @deprecated("Use a member of 
scala.collection.convert.Wrappers", "2.10.0") val JCollectionWrapper = Wrappers.JCollectionWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JConcurrentMapWrapper = Wrappers.JConcurrentMapWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JDictionaryWrapper = Wrappers.JDictionaryWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JEnumerationWrapper = Wrappers.JEnumerationWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIterableWrapper = Wrappers.JIterableWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JIteratorWrapper = Wrappers.JIteratorWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JListWrapper = Wrappers.JListWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JMapWrapper = Wrappers.JMapWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JPropertiesWrapper = Wrappers.JPropertiesWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val JSetWrapper = Wrappers.JSetWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableBufferWrapper = Wrappers.MutableBufferWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableMapWrapper = Wrappers.MutableMapWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSeqWrapper = Wrappers.MutableSeqWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val MutableSetWrapper = Wrappers.MutableSetWrapper + @deprecated("Use a member of scala.collection.convert.Wrappers", "2.10.0") val SeqWrapper = Wrappers.SeqWrapper + // Note to implementors: the cavalcade of deprecated methods herein should // serve as a warning to any who follow: don't overload implicit methods. - import java.{ lang => jl, util => ju } - import java.util.{ concurrent => juc } - - // Scala => Java - - /** - * Implicitly converts a Scala Iterator to a Java Iterator. - * The returned Java Iterator is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Iterator)` then the original - * Java Iterator will be returned. - * - * @param i The Iterator to be converted. - * @return A Java Iterator view of the argument. - */ - implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match { - case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterator to a Java Enumeration. - * The returned Java Enumeration is backed by the provided Scala - * Iterator and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterator was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Enumeration)` then the - * original Java Enumeration will be returned. - * - * @param i The Iterator to be converted. - * @return A Java Enumeration view of the argument. 
- */ - implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match { - case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] - case _ => IteratorWrapper(it) - } - - /** - * Implicitly converts a Scala Iterable to a Java Iterable. - * The returned Java Iterable is backed by the provided Scala - * Iterable and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asIterable(java.lang.Iterable)` then the original - * Java Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Java Iterable view of the argument. - */ - implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { - case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] - case _ => IterableWrapper(i) - } - - /** - * Implicitly converts a Scala Iterable to an immutable Java - * Collection. - * - * If the Scala Iterable was previously obtained from an implicit or - * explicit call of `asSizedIterable(java.util.Collection)` then the original - * Java Collection will be returned. - * - * @param i The SizedIterable to be converted. - * @return A Java Collection view of the argument. - */ - implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match { - case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] - case _ => new IterableWrapper(it) - } - - /** - * Implicitly converts a Scala mutable Buffer to a Java List. - * The returned Java List is backed by the provided Scala - * Buffer and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Buffer was previously obtained from an implicit or - * explicit call of `asBuffer(java.util.List)` then the original - * Java List will be returned. - * - * @param b The Buffer to be converted. - * @return A Java List view of the argument. - */ - implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { - case JListWrapper(wrapped) => wrapped - case _ => new MutableBufferWrapper(b) - } @deprecated("use bufferAsJavaList instead", "2.9.0") def asJavaList[A](b : mutable.Buffer[A]): ju.List[A] = bufferAsJavaList[A](b) - /** - * Implicitly converts a Scala mutable Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param b The Seq to be converted. - * @return A Java List view of the argument. - */ - implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match { - case JListWrapper(wrapped) => wrapped - case _ => new MutableSeqWrapper(seq) - } @deprecated("use mutableSeqAsJavaList instead", "2.9.0") def asJavaList[A](b : mutable.Seq[A]): ju.List[A] = mutableSeqAsJavaList[A](b) - /** - * Implicitly converts a Scala Seq to a Java List. - * The returned Java List is backed by the provided Scala - * Seq and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. 
- * - * If the Scala Seq was previously obtained from an implicit or - * explicit call of `asSeq(java.util.List)` then the original - * Java List will be returned. - * - * @param b The Seq to be converted. - * @return A Java List view of the argument. - */ - implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match { - case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] - case _ => new SeqWrapper(seq) - } - @deprecated("use seqAsJavaList instead", "2.9.0") def asJavaList[A](b : Seq[A]): ju.List[A] = seqAsJavaList[A](b) - /** - * Implicitly converts a Scala mutable Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Set was previously obtained from an implicit or - * explicit call of `asSet(java.util.Set)` then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { - case JSetWrapper(wrapped) => wrapped - case _ => new MutableSetWrapper(s) - } - @deprecated("use mutableSetAsJavaSet instead", "2.9.0") def asJavaSet[A](s : mutable.Set[A]): ju.Set[A] = mutableSetAsJavaSet[A](s) - /** - * Implicitly converts a Scala Set to a Java Set. - * The returned Java Set is backed by the provided Scala - * Set and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Set was previously obtained from an implicit or - * explicit call of asSet(java.util.Set) then the original - * Java Set will be returned. - * - * @param s The Set to be converted. - * @return A Java Set view of the argument. - */ - implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { - case JSetWrapper(wrapped) => wrapped - case _ => new SetWrapper(s) - } - @deprecated("use setAsJavaSet instead", "2.9.0") def asJavaSet[A](s: Set[A]): ju.Set[A] = setAsJavaSet[A](s) - /** - * Implicitly converts a Scala mutable Map to a Java Map. - * The returned Java Map is backed by the provided Scala - * Map and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala Map was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java Map will be returned. - * - * @param m The Map to be converted. - * @return A Java Map view of the argument. - */ - implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { - //case JConcurrentMapWrapper(wrapped) => wrapped - case JMapWrapper(wrapped) => wrapped - case _ => new MutableMapWrapper(m) - } - @deprecated("use mutableMapAsJavaMap instead", "2.9.0") def asJavaMap[A, B](m : mutable.Map[A, B]): ju.Map[A, B] = mutableMapAsJavaMap[A, B](m) - /** - * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. - * - * The returned Java `Dictionary` is backed by the provided Scala - * `Dictionary` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `Dictionary` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Dictionary)` then the original - * Java Dictionary will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Dictionary` view of the argument. 
- */ - implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { - //case JConcurrentMapWrapper(wrapped) => wrapped - case JDictionaryWrapper(wrapped) => wrapped - case _ => new DictionaryWrapper(m) - } - - /** - * Implicitly converts a Scala `Map` to a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and - * any side-effects of using it via the Java interface will be visible - * via the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Map)` then the original - * Java `Map` will be returned. - * - * @param m The `Map` to be converted. - * @return A Java `Map` view of the argument. - */ - implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { - //case JConcurrentMapWrapper(wrapped) => wrapped - case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] - case _ => new MapWrapper(m) - } - @deprecated("use mapAsJavaMap instead", "2.9.0") def asJavaMap[A, B](m : Map[A, B]): ju.Map[A, B] = mapAsJavaMap[A, B](m) - /** - * Implicitly converts a Scala mutable `ConcurrentMap` to a Java - * `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `ConcurrentMap` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)` - * then the original Java ConcurrentMap will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return A Java `ConcurrentMap` view of the argument. - */ - implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match { - case JConcurrentMapWrapper(wrapped) => wrapped - case _ => new ConcurrentMapWrapper(m) - } - - // Java => Scala - - /** - * Implicitly converts a Java `Iterator` to a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java `Iterator` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterator` was previously obtained from an implicit or - * explicit call of `asIterator(scala.collection.Iterator)` then the - * original Scala `Iterator` will be returned. - * - * @param i The `Iterator` to be converted. - * @return A Scala `Iterator` view of the argument. - */ - implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match { - case IteratorWrapper(wrapped) => wrapped - case _ => JIteratorWrapper(it) - } - - /** - * Implicitly converts a Java Enumeration to a Scala Iterator. - * The returned Scala Iterator is backed by the provided Java - * Enumeration and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Enumeration was previously obtained from an implicit or - * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)` - * then the original Scala Iterator will be returned. - * - * @param i The Enumeration to be converted. - * @return A Scala Iterator view of the argument. - */ - implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { - case IteratorWrapper(wrapped) => wrapped - case _ => JEnumerationWrapper(i) - } - - /** - * Implicitly converts a Java `Iterable` to a Scala `Iterable`. 
- * - * The returned Scala `Iterable` is backed by the provided Java `Iterable` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterable` was previously obtained from an implicit or - * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)` - * then the original Scala Iterable will be returned. - * - * @param i The Iterable to be converted. - * @return A Scala Iterable view of the argument. - */ - implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { - case IterableWrapper(wrapped) => wrapped - case _ => JIterableWrapper(i) - } - @deprecated("use iterableAsScalaIterable instead", "2.9.0") def asScalaIterable[A](i : jl.Iterable[A]): Iterable[A] = iterableAsScalaIterable[A](i) - /** - * Implicitly converts a Java `Collection` to an Scala `Iterable`. - * - * If the Java `Collection` was previously obtained from an implicit or - * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)` - * then the original Scala `Iterable` will be returned. - * - * @param i The Collection to be converted. - * @return A Scala Iterable view of the argument. - */ - implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { - case IterableWrapper(wrapped) => wrapped - case _ => JCollectionWrapper(i) - } @deprecated("use collectionAsScalaIterable instead", "2.9.0") def asScalaIterable[A](i : ju.Collection[A]): Iterable[A] = collectionAsScalaIterable[A](i) - /** - * Implicitly converts a Java `List` to a Scala mutable `Buffer`. - * - * The returned Scala `Buffer` is backed by the provided Java `List` - * and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java `List` was previously obtained from an implicit or - * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)` - * then the original Scala `Buffer` will be returned. - * - * @param l The `List` to be converted. - * @return A Scala mutable `Buffer` view of the argument. - */ - implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { - case MutableBufferWrapper(wrapped) => wrapped - case _ =>new JListWrapper(l) - } - - /** - * Implicitly converts a Java Set to a Scala mutable Set. - * The returned Scala Set is backed by the provided Java - * Set and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java Set was previously obtained from an implicit or - * explicit call of `asScalaSet(scala.collection.mutable.Set)` then - * the original Scala Set will be returned. - * - * @param s The Set to be converted. - * @return A Scala mutable Set view of the argument. - */ - implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { - case MutableSetWrapper(wrapped) => wrapped - case _ =>new JSetWrapper(s) - } - - /** - * Implicitly converts a Java `Map` to a Scala mutable `Map`. - * - * The returned Scala `Map` is backed by the provided Java `Map` and any - * side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * If the Java `Map` was previously obtained from an implicit or - * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then - * the original Scala Map will be returned. - * - * @param m The Map to be converted. - * @return A Scala mutable Map view of the argument. 
- */ - implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { - //case ConcurrentMapWrapper(wrapped) => wrapped - case MutableMapWrapper(wrapped) => wrapped - case _ => new JMapWrapper(m) - } - @deprecated("use mapAsScalaMap instead", "2.9.0") def asScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = mapAsScalaMap[A, B](m) - /** - * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. - * The returned Scala ConcurrentMap is backed by the provided Java - * ConcurrentMap and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java ConcurrentMap was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala ConcurrentMap will be returned. - * - * @param m The ConcurrentMap to be converted. - * @return A Scala mutable ConcurrentMap view of the argument. - */ - implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match { - case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying - case _ => new JConcurrentMapWrapper(m) - } - - /** - * Implicitly converts a Java `Dictionary` to a Scala mutable - * `Map[String, String]`. - * - * The returned Scala `Map[String, String]` is backed by the provided Java - * `Dictionary` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param m The Dictionary to be converted. - * @return A Scala mutable Map[String, String] view of the argument. - */ - implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { - case DictionaryWrapper(wrapped) => wrapped - case _ => new JDictionaryWrapper(p) - } - - /** - * Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. - * - * The returned Scala `Map[String, String]` is backed by the provided Java - * `Properties` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * @param m The Properties to be converted. - * @return A Scala mutable Map[String, String] view of the argument. - */ - implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { - case _ => new JPropertiesWrapper(p) - } - @deprecated("use propertiesAsScalaMap instead", "2.9.0") def asScalaMap(p: ju.Properties): mutable.Map[String, String] = propertiesAsScalaMap(p) - - // Private implementations (shared by JavaConverters) ... 
- - trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { - val underlying: Iterable[A] - def size = underlying.size - override def iterator = IteratorWrapper(underlying.iterator) - override def isEmpty = underlying.isEmpty - } - - case class IteratorWrapper[A](underlying: Iterator[A]) - extends ju.Iterator[A] with ju.Enumeration[A] { - def hasNext = underlying.hasNext - def next() = underlying.next - def hasMoreElements = underlying.hasNext - def nextElement() = underlying.next - def remove() = throw new UnsupportedOperationException - } - - class ToIteratorWrapper[A](underlying : Iterator[A]) { - def asJava = new IteratorWrapper(underlying) - } - - case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasNext - def next() = underlying.next - } - - case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] { - def hasNext = underlying.hasMoreElements - def next() = underlying.nextElement - } - - case class IterableWrapper[A](underlying: Iterable[A]) - extends ju.AbstractCollection[A] - with IterableWrapperTrait[A] { } - - case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] { - def iterator = underlying.iterator - override def size = underlying.size - override def isEmpty = underlying.isEmpty - def newBuilder[B] = new mutable.ArrayBuffer[B] - } - - case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - } - - case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) - extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { - val p = underlying(i) - underlying(i) = elem - p - } - } - - case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) - extends ju.AbstractList[A] with IterableWrapperTrait[A] { - def get(i: Int) = underlying(i) - override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } - override def add(elem: A) = { underlying append elem; true } - override def remove(i: Int) = underlying remove i - } - - case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { - def length = underlying.size - override def isEmpty = underlying.isEmpty - override def iterator: Iterator[A] = underlying.iterator - def apply(i: Int) = underlying.get(i) - def update(i: Int, elem: A) = underlying.set(i, elem) - def +=:(elem: A) = { underlying.subList(0, 0) add elem; this } - def +=(elem: A): this.type = { underlying add elem; this } - def insertAll(i: Int, elems: Traversable[A]) = { - val ins = underlying.subList(0, i) - elems.seq.foreach(ins.add(_)) - } - def remove(i: Int) = underlying.remove(i) - def clear() = underlying.clear() - def result = this - } - - class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] { - self => - def size = underlying.size - def iterator = new ju.Iterator[A] { - val ui = underlying.iterator - var prev: Option[A] = None - def hasNext = ui.hasNext - def next = { val e = ui.next; prev = Some(e); e } - def remove = prev match { - case Some(e) => - underlying match { - case ms: mutable.Set[a] => - ms remove e - prev = None - 
case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - - case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) { - override def add(elem: A) = { - val sz = underlying.size - underlying += elem - sz < underlying.size - } - override def remove(elem: AnyRef) = - try underlying remove elem.asInstanceOf[A] - catch { case ex: ClassCastException => false } - override def clear() = underlying.clear() - } - - case class JSetWrapper[A](underlying: ju.Set[A]) - extends mutable.AbstractSet[A] - with mutable.Set[A] - with mutable.SetLike[A, JSetWrapper[A]] { - - override def size = underlying.size - - def iterator = underlying.iterator - - def contains(elem: A): Boolean = underlying.contains(elem) - - def +=(elem: A): this.type = { underlying add elem; this } - def -=(elem: A): this.type = { underlying remove elem; this } - - override def add(elem: A): Boolean = underlying add elem - override def remove(elem: A): Boolean = underlying remove elem - override def clear() = underlying.clear() - - override def empty = JSetWrapper(new ju.HashSet[A]) - } - - class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self => - override def size = underlying.size - - override def get(key: AnyRef): B = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] { - def size = self.size - - def iterator = new ju.Iterator[ju.Map.Entry[A, B]] { - val ui = underlying.iterator - var prev : Option[A] = None - - def hasNext = ui.hasNext - - def next() = { - val (k, v) = ui.next - prev = Some(k) - new ju.Map.Entry[A, B] { - def getKey = k - def getValue = v - def setValue(v1 : B) = self.put(k, v1) - override def hashCode = k.hashCode + v.hashCode - override def equals(other: Any) = other match { - case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue - case _ => false - } - } - } - - def remove() { - prev match { - case Some(k) => - underlying match { - case mm: mutable.Map[a, _] => - mm remove k - prev = None - case _ => - throw new UnsupportedOperationException("remove") - } - case _ => - throw new IllegalStateException("next must be called at least once before remove") - } - } - } - } - } - - case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) - extends MapWrapper[A, B](underlying) { - override def put(k: A, v: B) = underlying.put(k, v) match { - case Some(v1) => v1 - case None => null.asInstanceOf[B] - } - - override def remove(k: AnyRef): B = try { - underlying remove k.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - - override def clear() = underlying.clear() - } - - trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] - extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] { - def underlying: ju.Map[A, B] - - override def size = underlying.size - - def get(k: A) = { - val v = underlying get k - if (v != null) - Some(v) - else if (underlying containsKey k) - Some(null.asInstanceOf[B]) - else - None - } - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): this.type = { underlying remove key; this } - - override def 
put(k: A, v: B): Option[B] = { - val r = underlying.put(k, v) - if (r != null) Some(r) else None - } - - override def update(k: A, v: B) { underlying.put(k, v) } - - override def remove(k: A): Option[B] = { - val r = underlying remove k - if (r != null) Some(r) else None - } - - def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { val e = ui.next(); (e.getKey, e.getValue) } - } - - override def clear() = underlying.clear() - - override def empty: Repr = null.asInstanceOf[Repr] - } - - case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) - extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { - override def empty = JMapWrapper(new ju.HashMap[A, B]) - } - - class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) - extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { - - def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - def remove(k: AnyRef, v: AnyRef) = try { - underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B]) - } catch { - case ex: ClassCastException => - false - } - - def replace(k: A, v: B): B = underlying.replace(k, v) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - - def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) - } - - case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) - extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] { - override def get(k: A) = { - val v = underlying get k - if (v != null) Some(v) - else None - } - - override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B]) - - def putIfAbsent(k: A, v: B): Option[B] = { - val r = underlying.putIfAbsent(k, v) - if (r != null) Some(r) else None - } - - def remove(k: A, v: B): Boolean = underlying.remove(k, v) - - def replace(k: A, v: B): Option[B] = { - val prev = underlying.replace(k, v) - if (prev != null) Some(prev) else None - } - - def replace(k: A, oldvalue: B, newvalue: B): Boolean = - underlying.replace(k, oldvalue, newvalue) - } - - case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) - extends ju.Dictionary[A, B] { - def size: Int = underlying.size - def isEmpty: Boolean = underlying.isEmpty - def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator) - def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator) - def get(key: AnyRef) = try { - underlying get key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - def put(key: A, value: B): B = underlying.put(key, value) match { - case Some(v) => v - case None => null.asInstanceOf[B] - } - override def remove(key: AnyRef) = try { - underlying remove key.asInstanceOf[A] match { - case None => null.asInstanceOf[B] - case Some(v) => v - } - } catch { - case ex: ClassCastException => null.asInstanceOf[B] - } - } - - case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) - extends mutable.AbstractMap[A, B] with mutable.Map[A, B] { - override def size: Int = underlying.size - - def get(k: A) = { - val v = underlying get k - if (v != null) Some(v) else None - } - - def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: A): this.type = { underlying 
remove key; this } - - override def put(k: A, v: B): Option[B] = { - val r = underlying.put(k, v) - if (r != null) Some(r) else None - } - - override def update(k: A, v: B) { underlying.put(k, v) } - - override def remove(k: A): Option[B] = { - val r = underlying remove k - if (r != null) Some(r) else None - } - - def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k)) - - override def clear() = underlying.clear() - } - - case class JPropertiesWrapper(underlying: ju.Properties) - extends mutable.AbstractMap[String, String] - with mutable.Map[String, String] - with mutable.MapLike[String, String, JPropertiesWrapper] { - - override def size = underlying.size - - def get(k: String) = { - val v = underlying get k - if (v != null) Some(v.asInstanceOf[String]) else None - } - - def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } - def -=(key: String): this.type = { underlying remove key; this } - - override def put(k: String, v: String): Option[String] = { - val r = underlying.put(k, v) - if (r != null) Some(r.asInstanceOf[String]) else None - } - - override def update(k: String, v: String) { underlying.put(k, v) } - - override def remove(k: String): Option[String] = { - val r = underlying remove k - if (r != null) Some(r.asInstanceOf[String]) else None - } - - def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { - val ui = underlying.entrySet.iterator - def hasNext = ui.hasNext - def next() = { - val e = ui.next() - (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) - } - } - - override def clear() = underlying.clear() - - override def empty = JPropertiesWrapper(new ju.Properties) - - def getProperty(key: String) = underlying.getProperty(key) - - def getProperty(key: String, defaultValue: String) = - underlying.getProperty(key, defaultValue) - - def setProperty(key: String, value: String) = - underlying.setProperty(key, value) - } } diff --git a/src/library/scala/collection/JavaConverters.scala b/src/library/scala/collection/JavaConverters.scala index d213e60112..07e8518cb0 100755 --- a/src/library/scala/collection/JavaConverters.scala +++ b/src/library/scala/collection/JavaConverters.scala @@ -8,6 +8,14 @@ package scala.collection +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import convert._ + +// TODO: I cleaned all this documentation up in JavaConversions, but the +// documentation in here is basically the pre-cleaned-up version with minor +// additions. Would be nice to have in one place. + + /** A collection of decorators that allow to convert between * Scala and Java collections using `asScala` and `asJava` methods. * @@ -48,494 +56,48 @@ package scala.collection * @author Martin Odersky * @since 2.8.1 */ - -trait JavaConverters { - import java.{ lang => jl, util => ju } - import java.util.{ concurrent => juc } - import JavaConversions._ - - // TODO: I cleaned all this documentation up in JavaConversions, but the - // documentation in here is basically the pre-cleaned-up version with minor - // additions. Would be nice to have in one place. 
- - // Conversion decorator classes - - /** Generic class containing the `asJava` converter method */ - class AsJava[C](op: => C) { - /** Converts a Scala collection to the corresponding Java collection */ - def asJava: C = op - } - - /** Generic class containing the `asScala` converter method */ - class AsScala[C](op: => C) { - /** Converts a Java collection to the corresponding Scala collection */ - def asScala: C = op - } - - /** Generic class containing the `asJavaCollection` converter method */ - class AsJavaCollection[A](i: Iterable[A]) { - /** Converts a Scala `Iterable` to a Java `Collection` */ - def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i) - } - - /** Generic class containing the `asJavaEnumeration` converter method */ - class AsJavaEnumeration[A](i: Iterator[A]) { - /** Converts a Scala `Iterator` to a Java `Enumeration` */ - def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i) - } - - /** Generic class containing the `asJavaDictionary` converter method */ - class AsJavaDictionary[A, B](m : mutable.Map[A, B]) { - /** Converts a Scala `Map` to a Java `Dictionary` */ - def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m) - } - - // Scala => Java - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a - * Java `Iterator`. The returned Java `Iterator` is backed by the provided Scala - * `Iterator` and any side-effects of using it via the Java interface will - * be visible via the Scala interface and vice versa. - * - * If the Scala `Iterator` was previously obtained from an implicit or explicit - * call of `asIterator(java.util.Iterator)` then the original Java `Iterator` - * will be returned by the `asJava` method. - * - * @param i The `Iterator` to be converted. - * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument. - */ - implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = - new AsJava(asJavaIterator(i)) - - /** - * Adds an `asJavaEnumeration` method that implicitly converts a Scala - * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is - * backed by the provided Scala `Iterator` and any side-effects of using - * it via the Java interface will be visible via the Scala interface and - * vice versa. - * - * If the Scala `Iterator` was previously obtained from an implicit or - * explicit call of `asIterator(java.util.Enumeration)` then the - * original Java `Enumeration` will be returned. - * - * @param i The `Iterator` to be converted. - * @return An object with an `asJavaEnumeration` method that returns a Java - * `Enumeration` view of the argument. - */ - implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = - new AsJavaEnumeration(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Iterable` to - * a Java `Iterable`. - * - * The returned Java `Iterable` is backed by the provided Scala `Iterable` - * and any side-effects of using it via the Java interface will be visible - * via the Scala interface and vice versa. - * - * If the Scala `Iterable` was previously obtained from an implicit or - * explicit call of `asIterable(java.lang.Iterable)` then the original - * Java `Iterable` will be returned. - * - * @param i The `Iterable` to be converted. - * @return An object with an `asJavaCollection` method that returns a Java - * `Iterable` view of the argument. 
- */ - implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = - new AsJava(asJavaIterable(i)) - - /** - * Adds an `asJavaCollection` method that implicitly converts a Scala - * `Iterable` to an immutable Java `Collection`. - * - * If the Scala `Iterable` was previously obtained from an implicit or - * explicit call of `asSizedIterable(java.util.Collection)` then the - * original Java `Collection` will be returned. - * - * @param i The `SizedIterable` to be converted. - * @return An object with an `asJava` method that returns a Java - * `Collection` view of the argument. - */ - implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = - new AsJavaCollection(i) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` - * to a Java `List`. - * - * The returned Java `List` is backed by the provided Scala `Buffer` and any - * side-effects of using it via the Java interface will be visible via the - * Scala interface and vice versa. - * - * If the Scala `Buffer` was previously obtained from an implicit or explicit - * call of `asBuffer(java.util.List)` then the original Java `List` will be - * returned. - * - * @param b The `Buffer` to be converted. - * @return An object with an `asJava` method that returns a Java `List` view - * of the argument. - */ - implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = - new AsJava(bufferAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` - * to a Java `List`. - * - * The returned Java `List` is backed by the provided Scala `Seq` and any - * side-effects of using it via the Java interface will be visible via the - * Scala interface and vice versa. - * - * If the Scala `Seq` was previously obtained from an implicit or explicit - * call of `asSeq(java.util.List)` then the original Java `List` will be - * returned. - * - * @param b The `Seq` to be converted. - * @return An object with an `asJava` method that returns a Java `List` - * view of the argument. - */ - implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = - new AsJava(mutableSeqAsJavaList(b)) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Seq` to a - * Java `List`. - * - * The returned Java `List` is backed by the provided Scala `Seq` and any - * side-effects of using it via the Java interface will be visible via the - * Scala interface and vice versa. - * - * If the Scala `Seq` was previously obtained from an implicit or explicit - * call of `asSeq(java.util.List)` then the original Java `List` will be - * returned. - * - * @param b The `Seq` to be converted. - * @return An object with an `asJava` method that returns a Java `List` - * view of the argument. 
- */ - implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = - new AsJava(seqAsJavaList(b)) +object JavaConverters extends DecorateAsJava with DecorateAsScala { + @deprecated("Don't access these decorators directly.", "2.10.0") + type AsJava[A] = Decorators.AsJava[A] + @deprecated("Don't access these decorators directly.", "2.10.0") + type AsScala[A] = Decorators.AsScala[A] + @deprecated("Don't access these decorators directly.", "2.10.0") + type AsJavaCollection[A] = Decorators.AsJavaCollection[A] + @deprecated("Don't access these decorators directly.", "2.10.0") + type AsJavaEnumeration[A] = Decorators.AsJavaEnumeration[A] + @deprecated("Don't access these decorators directly.", "2.10.0") + type AsJavaDictionary[A, B] = Decorators.AsJavaDictionary[A, B] @deprecated("Use bufferAsJavaListConverter instead", "2.9.0") def asJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = bufferAsJavaListConverter(b) + @deprecated("Use mutableSeqAsJavaListConverter instead", "2.9.0") def asJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = mutableSeqAsJavaListConverter(b) + @deprecated("Use seqAsJavaListConverter instead", "2.9.0") def asJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = seqAsJavaListConverter(b) - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Set`> - * to a Java `Set`. - * - * The returned Java `Set` is backed by the provided Scala `Set` and any - * side-effects of using it via the Java interface will be visible via - * the Scala interface and vice versa. - * - * If the Scala `Set` was previously obtained from an implicit or explicit - * call of `asSet(java.util.Set)` then the original Java `Set` will be - * returned. - * - * @param s The `Set` to be converted. - * @return An object with an `asJava` method that returns a Java `Set` view - * of the argument. - */ - implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = - new AsJava(mutableSetAsJavaSet(s)) - @deprecated("Use mutableSetAsJavaSetConverter instead", "2.9.0") def asJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = mutableSetAsJavaSetConverter(s) - /** - * Adds an `asJava` method that implicitly converts a Scala `Set` to a - * Java `Set`. - * - * The returned Java `Set` is backed by the provided Scala `Set` and any - * side-effects of using it via the Java interface will be visible via - * the Scala interface and vice versa. - * - * If the Scala `Set` was previously obtained from an implicit or explicit - * call of `asSet(java.util.Set)` then the original Java `Set` will be - * returned. - * - * @param s The `Set` to be converted. - * @return An object with an `asJava` method that returns a Java `Set` view - * of the argument. - */ - implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = - new AsJava(setAsJavaSet(s)) - @deprecated("Use setAsJavaSetConverter instead", "2.9.0") def asJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = setAsJavaSetConverter(s) - /** - * Adds an `asJava` method that implicitly converts a Scala mutable `Map` - * to a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and any - * side-effects of using it via the Java interface will be visible via the - * Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or explicit - * call of `asMap(java.util.Map)` then the original Java `Map` will be - * returned. - * - * @param m The `Map` to be converted. 
- * @return An object with an `asJava` method that returns a Java `Map` view - * of the argument. - */ - implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mutableMapAsJavaMap(m)) - @deprecated("use mutableMapAsJavaMapConverter instead", "2.9.0") def asJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = mutableMapAsJavaMapConverter(m) - /** - * Adds an `asJavaDictionary` method that implicitly converts a Scala - * mutable `Map` to a Java `Dictionary`. - * - * The returned Java `Dictionary` is backed by the provided Scala - * `Dictionary` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `Dictionary` was previously obtained from an implicit or - * explicit call of `asMap(java.util.Dictionary)` then the original - * Java `Dictionary` will be returned. - * - * @param m The `Map` to be converted. - * @return An object with an `asJavaDictionary` method that returns a - * Java `Dictionary` view of the argument. - */ - implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] = - new AsJavaDictionary(m) - - /** - * Adds an `asJava` method that implicitly converts a Scala `Map` to - * a Java `Map`. - * - * The returned Java `Map` is backed by the provided Scala `Map` and any - * side-effects of using it via the Java interface will be visible via - * the Scala interface and vice versa. - * - * If the Scala `Map` was previously obtained from an implicit or explicit - * call of `asMap(java.util.Map)` then the original Java `Map` will be - * returned. - * - * @param m The `Map` to be converted. - * @return An object with an `asJava` method that returns a Java `Map` view - * of the argument. - */ - implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = - new AsJava(mapAsJavaMap(m)) - @deprecated("Use mapAsJavaMapConverter instead", "2.9.0") def asJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = mapAsJavaMapConverter(m) - /** - * Adds an `asJava` method that implicitly converts a Scala mutable - * `ConcurrentMap` to a Java `ConcurrentMap`. - * - * The returned Java `ConcurrentMap` is backed by the provided Scala - * `ConcurrentMap` and any side-effects of using it via the Java interface - * will be visible via the Scala interface and vice versa. - * - * If the Scala `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)` - * then the original Java `ConcurrentMap` will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return An object with an `asJava` method that returns a Java - * `ConcurrentMap` view of the argument. - */ - implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] = - new AsJava(asJavaConcurrentMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterator` to - * a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java `Iterator` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterator` was previously obtained from an implicit or - * explicit call of `asIterator(scala.collection.Iterator)` then the - * original Scala `Iterator` will be returned. - * - * @param i The `Iterator` to be converted. 
- * @return An object with an `asScala` method that returns a Scala - * `Iterator` view of the argument. - */ - implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = - new AsScala(asScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Enumeration` - * to a Scala `Iterator`. - * - * The returned Scala `Iterator` is backed by the provided Java - * `Enumeration` and any side-effects of using it via the Scala interface - * will be visible via the Java interface and vice versa. - * - * If the Java `Enumeration` was previously obtained from an implicit or - * explicit call of `asEnumeration(scala.collection.Iterator)` then the - * original Scala `Iterator` will be returned. - * - * @param i The `Enumeration` to be converted. - * @return An object with an `asScala` method that returns a Scala - * `Iterator` view of the argument. - */ - implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = - new AsScala(enumerationAsScalaIterator(i)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Iterable` to - * a Scala `Iterable`. - * - * The returned Scala `Iterable` is backed by the provided Java `Iterable` - * and any side-effects of using it via the Scala interface will be visible - * via the Java interface and vice versa. - * - * If the Java `Iterable` was previously obtained from an implicit or - * explicit call of `asIterable(scala.collection.Iterable)` then the original - * Scala `Iterable` will be returned. - * - * @param i The `Iterable` to be converted. - * @return An object with an `asScala` method that returns a Scala `Iterable` - * view of the argument. - */ - implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = - new AsScala(iterableAsScalaIterable(i)) - @deprecated("Use iterableAsScalaIterableConverter instead", "2.9.0") def asScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = iterableAsScalaIterableConverter(i) - /** - * Adds an `asScala` method that implicitly converts a Java `Collection` to - * an Scala `Iterable`. - * - * If the Java `Collection` was previously obtained from an implicit or - * explicit call of `asCollection(scala.collection.SizedIterable)` then - * the original Scala `SizedIterable` will be returned. - * - * @param i The `Collection` to be converted. - * @return An object with an `asScala` method that returns a Scala - * `SizedIterable` view of the argument. - */ - implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = - new AsScala(collectionAsScalaIterable(i)) - @deprecated("Use collectionAsScalaIterableConverter instead", "2.9.0") def asScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = collectionAsScalaIterableConverter(i) - /** - * Adds an `asScala` method that implicitly converts a Java `List` to a - * Scala mutable `Buffer`. - * - * The returned Scala `Buffer` is backed by the provided Java `List` and - * any side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * If the Java `List` was previously obtained from an implicit or explicit - * call of `asList(scala.collection.mutable.Buffer)` then the original - * Scala `Buffer` will be returned. - * - * @param l The `List` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `Buffer` view of the argument. 
- */ - implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = - new AsScala(asScalaBuffer(l)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Set` to a - * Scala mutable `Set`. - * - * The returned Scala `Set` is backed by the provided Java `Set` and any - * side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * If the Java `Set` was previously obtained from an implicit or explicit - * call of `asSet(scala.collection.mutable.Set)` then the original - * Scala `Set` will be returned. - * - * @param s The `Set` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `Set` view of the argument. - */ - implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = - new AsScala(asScalaSet(s)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala - * mutable `Map`. The returned Scala `Map` is backed by the provided Java - * `Map` and any side-effects of using it via the Scala interface will - * be visible via the Java interface and vice versa. - * - * If the Java `Map` was previously obtained from an implicit or explicit - * call of `asMap(scala.collection.mutable.Map)` then the original - * Scala `Map` will be returned. - * - * @param m The `Map` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `Map` view of the argument. - */ - implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(mapAsScalaMap(m)) - @deprecated("Use mapAsScalaMapConverter instead", "2.9.0") def asScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = mapAsScalaMapConverter(m) - /** - * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` - * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is - * backed by the provided Java `ConcurrentMap` and any side-effects of using - * it via the Scala interface will be visible via the Java interface and - * vice versa. - * - * If the Java `ConcurrentMap` was previously obtained from an implicit or - * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` - * then the original Scala `ConcurrentMap` will be returned. - * - * @param m The `ConcurrentMap` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `ConcurrentMap` view of the argument. - */ - implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] = - new AsScala(asScalaConcurrentMap(m)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Dictionary` - * to a Scala mutable `Map[String, String]`. The returned Scala - * `Map[String, String]` is backed by the provided Java `Dictionary` and - * any side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * @param m The `Dictionary` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `Map[String, String]` view of the argument. - */ - implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] = - new AsScala(dictionaryAsScalaMap(p)) - - /** - * Adds an `asScala` method that implicitly converts a Java `Properties` - * to a Scala mutable `Map[String, String]`. 
The returned Scala - * `Map[String, String]` is backed by the provided Java `Properties` and - * any side-effects of using it via the Scala interface will be visible via - * the Java interface and vice versa. - * - * @param m The `Properties` to be converted. - * @return An object with an `asScala` method that returns a Scala mutable - * `Map[String, String]` view of the argument. - */ - implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - new AsScala(propertiesAsScalaMap(p)) - @deprecated("Use propertiesAsScalaMapConverter instead", "2.9.0") - def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = - propertiesAsScalaMapConverter(p) - -} - -object JavaConverters extends JavaConverters \ No newline at end of file + def asScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = propertiesAsScalaMapConverter(p) +} \ No newline at end of file diff --git a/src/library/scala/collection/convert/DecorateAsJava.scala b/src/library/scala/collection/convert/DecorateAsJava.scala new file mode 100644 index 0000000000..76837d937c --- /dev/null +++ b/src/library/scala/collection/convert/DecorateAsJava.scala @@ -0,0 +1,296 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import Decorators._ +import WrapAsJava._ + +/** A collection of decorators that allow to convert between + * Scala and Java collections using `asScala` and `asJava` methods. + * + * The following conversions are supported via `asJava`, `asScala` + * + * - `scala.collection.Iterable` <=> `java.lang.Iterable` + * - `scala.collection.Iterator` <=> `java.util.Iterator` + * - `scala.collection.mutable.Buffer` <=> `java.util.List` + * - `scala.collection.mutable.Set` <=> `java.util.Set` + * - `scala.collection.mutable.Map` <=> `java.util.Map` + * - `scala.collection.mutable.ConcurrentMap` <=> `java.util.concurrent.ConcurrentMap` + * + * In all cases, converting from a source type to a target type and back + * again will return the original source object, e.g. + * {{{ + * import scala.collection.JavaConverters._ + * + * val sl = new scala.collection.mutable.ListBuffer[Int] + * val jl : java.util.List[Int] = sl.asJava + * val sl2 : scala.collection.mutable.Buffer[Int] = jl.asScala + * assert(sl eq sl2) + * }}} + * The following conversions also are supported, but the + * direction Scala to Java is done my a more specifically named method: + * `asJavaCollection`, `asJavaEnumeration`, `asJavaDictionary`. + * + * - `scala.collection.Iterable` <=> `java.util.Collection` + * - `scala.collection.Iterator` <=> `java.util.Enumeration` + * - `scala.collection.mutable.Map` <=> `java.util.Dictionary` + * + * In addition, the following one way conversions are provided via `asJava`: + * + * - `scala.collection.Seq` => `java.util.List` + * - `scala.collection.mutable.Seq` => `java.util.List` + * - `scala.collection.Set` => `java.util.Set` + * - `scala.collection.Map` => `java.util.Map` + * + * @author Martin Odersky + * @since 2.8.1 + */ + +trait DecorateAsJava { + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterator` to a + * Java `Iterator`. 
The returned Java `Iterator` is backed by the provided Scala + * `Iterator` and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or explicit + * call of `asIterator(java.util.Iterator)` then the original Java `Iterator` + * will be returned by the `asJava` method. + * + * @param i The `Iterator` to be converted. + * @return An object with an `asJava` method that returns a Java `Iterator` view of the argument. + */ + implicit def asJavaIteratorConverter[A](i : Iterator[A]): AsJava[ju.Iterator[A]] = + new AsJava(asJavaIterator(i)) + + /** + * Adds an `asJavaEnumeration` method that implicitly converts a Scala + * `Iterator` to a Java `Enumeration`. The returned Java `Enumeration` is + * backed by the provided Scala `Iterator` and any side-effects of using + * it via the Java interface will be visible via the Scala interface and + * vice versa. + * + * If the Scala `Iterator` was previously obtained from an implicit or + * explicit call of `asIterator(java.util.Enumeration)` then the + * original Java `Enumeration` will be returned. + * + * @param i The `Iterator` to be converted. + * @return An object with an `asJavaEnumeration` method that returns a Java + * `Enumeration` view of the argument. + */ + implicit def asJavaEnumerationConverter[A](i : Iterator[A]): AsJavaEnumeration[A] = + new AsJavaEnumeration(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Iterable` to + * a Java `Iterable`. + * + * The returned Java `Iterable` is backed by the provided Scala `Iterable` + * and any side-effects of using it via the Java interface will be visible + * via the Scala interface and vice versa. + * + * If the Scala `Iterable` was previously obtained from an implicit or + * explicit call of `asIterable(java.lang.Iterable)` then the original + * Java `Iterable` will be returned. + * + * @param i The `Iterable` to be converted. + * @return An object with an `asJavaCollection` method that returns a Java + * `Iterable` view of the argument. + */ + implicit def asJavaIterableConverter[A](i : Iterable[A]): AsJava[jl.Iterable[A]] = + new AsJava(asJavaIterable(i)) + + /** + * Adds an `asJavaCollection` method that implicitly converts a Scala + * `Iterable` to an immutable Java `Collection`. + * + * If the Scala `Iterable` was previously obtained from an implicit or + * explicit call of `asSizedIterable(java.util.Collection)` then the + * original Java `Collection` will be returned. + * + * @param i The `SizedIterable` to be converted. + * @return An object with an `asJava` method that returns a Java + * `Collection` view of the argument. + */ + implicit def asJavaCollectionConverter[A](i : Iterable[A]): AsJavaCollection[A] = + new AsJavaCollection(i) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Buffer` + * to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Buffer` and any + * side-effects of using it via the Java interface will be visible via the + * Scala interface and vice versa. + * + * If the Scala `Buffer` was previously obtained from an implicit or explicit + * call of `asBuffer(java.util.List)` then the original Java `List` will be + * returned. + * + * @param b The `Buffer` to be converted. + * @return An object with an `asJava` method that returns a Java `List` view + * of the argument. 
+ */ + implicit def bufferAsJavaListConverter[A](b : mutable.Buffer[A]): AsJava[ju.List[A]] = + new AsJava(bufferAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Seq` + * to a Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any + * side-effects of using it via the Java interface will be visible via the + * Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit + * call of `asSeq(java.util.List)` then the original Java `List` will be + * returned. + * + * @param b The `Seq` to be converted. + * @return An object with an `asJava` method that returns a Java `List` + * view of the argument. + */ + implicit def mutableSeqAsJavaListConverter[A](b : mutable.Seq[A]): AsJava[ju.List[A]] = + new AsJava(mutableSeqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Seq` to a + * Java `List`. + * + * The returned Java `List` is backed by the provided Scala `Seq` and any + * side-effects of using it via the Java interface will be visible via the + * Scala interface and vice versa. + * + * If the Scala `Seq` was previously obtained from an implicit or explicit + * call of `asSeq(java.util.List)` then the original Java `List` will be + * returned. + * + * @param b The `Seq` to be converted. + * @return An object with an `asJava` method that returns a Java `List` + * view of the argument. + */ + implicit def seqAsJavaListConverter[A](b : Seq[A]): AsJava[ju.List[A]] = + new AsJava(seqAsJavaList(b)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Set`> + * to a Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any + * side-effects of using it via the Java interface will be visible via + * the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit + * call of `asSet(java.util.Set)` then the original Java `Set` will be + * returned. + * + * @param s The `Set` to be converted. + * @return An object with an `asJava` method that returns a Java `Set` view + * of the argument. + */ + implicit def mutableSetAsJavaSetConverter[A](s : mutable.Set[A]): AsJava[ju.Set[A]] = + new AsJava(mutableSetAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Set` to a + * Java `Set`. + * + * The returned Java `Set` is backed by the provided Scala `Set` and any + * side-effects of using it via the Java interface will be visible via + * the Scala interface and vice versa. + * + * If the Scala `Set` was previously obtained from an implicit or explicit + * call of `asSet(java.util.Set)` then the original Java `Set` will be + * returned. + * + * @param s The `Set` to be converted. + * @return An object with an `asJava` method that returns a Java `Set` view + * of the argument. + */ + implicit def setAsJavaSetConverter[A](s : Set[A]): AsJava[ju.Set[A]] = + new AsJava(setAsJavaSet(s)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable `Map` + * to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any + * side-effects of using it via the Java interface will be visible via the + * Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit + * call of `asMap(java.util.Map)` then the original Java `Map` will be + * returned. + * + * @param m The `Map` to be converted. 
+ * @return An object with an `asJava` method that returns a Java `Map` view + * of the argument. + */ + implicit def mutableMapAsJavaMapConverter[A, B](m : mutable.Map[A, B]): AsJava[ju.Map[A, B]] = + new AsJava(mutableMapAsJavaMap(m)) + + /** + * Adds an `asJavaDictionary` method that implicitly converts a Scala + * mutable `Map` to a Java `Dictionary`. + * + * The returned Java `Dictionary` is backed by the provided Scala + * `Dictionary` and any side-effects of using it via the Java interface + * will be visible via the Scala interface and vice versa. + * + * If the Scala `Dictionary` was previously obtained from an implicit or + * explicit call of `asMap(java.util.Dictionary)` then the original + * Java `Dictionary` will be returned. + * + * @param m The `Map` to be converted. + * @return An object with an `asJavaDictionary` method that returns a + * Java `Dictionary` view of the argument. + */ + implicit def asJavaDictionaryConverter[A, B](m : mutable.Map[A, B]): AsJavaDictionary[A, B] = + new AsJavaDictionary(m) + + /** + * Adds an `asJava` method that implicitly converts a Scala `Map` to + * a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and any + * side-effects of using it via the Java interface will be visible via + * the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or explicit + * call of `asMap(java.util.Map)` then the original Java `Map` will be + * returned. + * + * @param m The `Map` to be converted. + * @return An object with an `asJava` method that returns a Java `Map` view + * of the argument. + */ + implicit def mapAsJavaMapConverter[A, B](m : Map[A, B]): AsJava[ju.Map[A, B]] = + new AsJava(mapAsJavaMap(m)) + + /** + * Adds an `asJava` method that implicitly converts a Scala mutable + * `ConcurrentMap` to a Java `ConcurrentMap`. + * + * The returned Java `ConcurrentMap` is backed by the provided Scala + * `ConcurrentMap` and any side-effects of using it via the Java interface + * will be visible via the Scala interface and vice versa. + * + * If the Scala `ConcurrentMap` was previously obtained from an implicit or + * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)` + * then the original Java `ConcurrentMap` will be returned. + * + * @param m The `ConcurrentMap` to be converted. + * @return An object with an `asJava` method that returns a Java + * `ConcurrentMap` view of the argument. + */ + implicit def asJavaConcurrentMapConverter[A, B](m: mutable.ConcurrentMap[A, B]): AsJava[juc.ConcurrentMap[A, B]] = + new AsJava(asJavaConcurrentMap(m)) +} diff --git a/src/library/scala/collection/convert/DecorateAsScala.scala b/src/library/scala/collection/convert/DecorateAsScala.scala new file mode 100644 index 0000000000..bb14228e67 --- /dev/null +++ b/src/library/scala/collection/convert/DecorateAsScala.scala @@ -0,0 +1,189 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import Decorators._ +import WrapAsScala._ + +trait DecorateAsScala { + /** + * Adds an `asScala` method that implicitly converts a Java `Iterator` to + * a Scala `Iterator`. 
+ * + * The returned Scala `Iterator` is backed by the provided Java `Iterator` + * and any side-effects of using it via the Scala interface will be visible + * via the Java interface and vice versa. + * + * If the Java `Iterator` was previously obtained from an implicit or + * explicit call of `asIterator(scala.collection.Iterator)` then the + * original Scala `Iterator` will be returned. + * + * @param i The `Iterator` to be converted. + * @return An object with an `asScala` method that returns a Scala + * `Iterator` view of the argument. + */ + implicit def asScalaIteratorConverter[A](i : ju.Iterator[A]): AsScala[Iterator[A]] = + new AsScala(asScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Enumeration` + * to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java + * `Enumeration` and any side-effects of using it via the Scala interface + * will be visible via the Java interface and vice versa. + * + * If the Java `Enumeration` was previously obtained from an implicit or + * explicit call of `asEnumeration(scala.collection.Iterator)` then the + * original Scala `Iterator` will be returned. + * + * @param i The `Enumeration` to be converted. + * @return An object with an `asScala` method that returns a Scala + * `Iterator` view of the argument. + */ + implicit def enumerationAsScalaIteratorConverter[A](i : ju.Enumeration[A]): AsScala[Iterator[A]] = + new AsScala(enumerationAsScalaIterator(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Iterable` to + * a Scala `Iterable`. + * + * The returned Scala `Iterable` is backed by the provided Java `Iterable` + * and any side-effects of using it via the Scala interface will be visible + * via the Java interface and vice versa. + * + * If the Java `Iterable` was previously obtained from an implicit or + * explicit call of `asIterable(scala.collection.Iterable)` then the original + * Scala `Iterable` will be returned. + * + * @param i The `Iterable` to be converted. + * @return An object with an `asScala` method that returns a Scala `Iterable` + * view of the argument. + */ + implicit def iterableAsScalaIterableConverter[A](i : jl.Iterable[A]): AsScala[Iterable[A]] = + new AsScala(iterableAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Collection` to + * an Scala `Iterable`. + * + * If the Java `Collection` was previously obtained from an implicit or + * explicit call of `asCollection(scala.collection.SizedIterable)` then + * the original Scala `SizedIterable` will be returned. + * + * @param i The `Collection` to be converted. + * @return An object with an `asScala` method that returns a Scala + * `SizedIterable` view of the argument. + */ + implicit def collectionAsScalaIterableConverter[A](i : ju.Collection[A]): AsScala[Iterable[A]] = + new AsScala(collectionAsScalaIterable(i)) + + /** + * Adds an `asScala` method that implicitly converts a Java `List` to a + * Scala mutable `Buffer`. + * + * The returned Scala `Buffer` is backed by the provided Java `List` and + * any side-effects of using it via the Scala interface will be visible via + * the Java interface and vice versa. + * + * If the Java `List` was previously obtained from an implicit or explicit + * call of `asList(scala.collection.mutable.Buffer)` then the original + * Scala `Buffer` will be returned. + * + * @param l The `List` to be converted. 
+ * @return An object with an `asScala` method that returns a Scala mutable + * `Buffer` view of the argument. + */ + implicit def asScalaBufferConverter[A](l : ju.List[A]): AsScala[mutable.Buffer[A]] = + new AsScala(asScalaBuffer(l)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Set` to a + * Scala mutable `Set`. + * + * The returned Scala `Set` is backed by the provided Java `Set` and any + * side-effects of using it via the Scala interface will be visible via + * the Java interface and vice versa. + * + * If the Java `Set` was previously obtained from an implicit or explicit + * call of `asSet(scala.collection.mutable.Set)` then the original + * Scala `Set` will be returned. + * + * @param s The `Set` to be converted. + * @return An object with an `asScala` method that returns a Scala mutable + * `Set` view of the argument. + */ + implicit def asScalaSetConverter[A](s : ju.Set[A]): AsScala[mutable.Set[A]] = + new AsScala(asScalaSet(s)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Map` to a Scala + * mutable `Map`. The returned Scala `Map` is backed by the provided Java + * `Map` and any side-effects of using it via the Scala interface will + * be visible via the Java interface and vice versa. + * + * If the Java `Map` was previously obtained from an implicit or explicit + * call of `asMap(scala.collection.mutable.Map)` then the original + * Scala `Map` will be returned. + * + * @param m The `Map` to be converted. + * @return An object with an `asScala` method that returns a Scala mutable + * `Map` view of the argument. + */ + implicit def mapAsScalaMapConverter[A, B](m : ju.Map[A, B]): AsScala[mutable.Map[A, B]] = + new AsScala(mapAsScalaMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `ConcurrentMap` + * to a Scala mutable `ConcurrentMap`. The returned Scala `ConcurrentMap` is + * backed by the provided Java `ConcurrentMap` and any side-effects of using + * it via the Scala interface will be visible via the Java interface and + * vice versa. + * + * If the Java `ConcurrentMap` was previously obtained from an implicit or + * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` + * then the original Scala `ConcurrentMap` will be returned. + * + * @param m The `ConcurrentMap` to be converted. + * @return An object with an `asScala` method that returns a Scala mutable + * `ConcurrentMap` view of the argument. + */ + implicit def asScalaConcurrentMapConverter[A, B](m: juc.ConcurrentMap[A, B]): AsScala[mutable.ConcurrentMap[A, B]] = + new AsScala(asScalaConcurrentMap(m)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Dictionary` + * to a Scala mutable `Map[String, String]`. The returned Scala + * `Map[String, String]` is backed by the provided Java `Dictionary` and + * any side-effects of using it via the Scala interface will be visible via + * the Java interface and vice versa. + * + * @param m The `Dictionary` to be converted. + * @return An object with an `asScala` method that returns a Scala mutable + * `Map[String, String]` view of the argument. + */ + implicit def dictionaryAsScalaMapConverter[A, B](p: ju.Dictionary[A, B]): AsScala[mutable.Map[A, B]] = + new AsScala(dictionaryAsScalaMap(p)) + + /** + * Adds an `asScala` method that implicitly converts a Java `Properties` + * to a Scala mutable `Map[String, String]`. 
The returned Scala + * `Map[String, String]` is backed by the provided Java `Properties` and + * any side-effects of using it via the Scala interface will be visible via + * the Java interface and vice versa. + * + * @param m The `Properties` to be converted. + * @return An object with an `asScala` method that returns a Scala mutable + * `Map[String, String]` view of the argument. + */ + implicit def propertiesAsScalaMapConverter(p: ju.Properties): AsScala[mutable.Map[String, String]] = + new AsScala(propertiesAsScalaMap(p)) +} diff --git a/src/library/scala/collection/convert/Decorators.scala b/src/library/scala/collection/convert/Decorators.scala new file mode 100644 index 0000000000..3bdd9a0f1c --- /dev/null +++ b/src/library/scala/collection/convert/Decorators.scala @@ -0,0 +1,46 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } + +private[collection] trait Decorators { + /** Generic class containing the `asJava` converter method */ + class AsJava[A](op: => A) { + /** Converts a Scala collection to the corresponding Java collection */ + def asJava: A = op + } + + /** Generic class containing the `asScala` converter method */ + class AsScala[A](op: => A) { + /** Converts a Java collection to the corresponding Scala collection */ + def asScala: A = op + } + + /** Generic class containing the `asJavaCollection` converter method */ + class AsJavaCollection[A](i: Iterable[A]) { + /** Converts a Scala `Iterable` to a Java `Collection` */ + def asJavaCollection: ju.Collection[A] = JavaConversions.asJavaCollection(i) + } + + /** Generic class containing the `asJavaEnumeration` converter method */ + class AsJavaEnumeration[A](i: Iterator[A]) { + /** Converts a Scala `Iterator` to a Java `Enumeration` */ + def asJavaEnumeration: ju.Enumeration[A] = JavaConversions.asJavaEnumeration(i) + } + + /** Generic class containing the `asJavaDictionary` converter method */ + class AsJavaDictionary[A, B](m : mutable.Map[A, B]) { + /** Converts a Scala `Map` to a Java `Dictionary` */ + def asJavaDictionary: ju.Dictionary[A, B] = JavaConversions.asJavaDictionary(m) + } +} + +private[collection] object Decorators extends Decorators diff --git a/src/library/scala/collection/convert/WrapAsJava.scala b/src/library/scala/collection/convert/WrapAsJava.scala new file mode 100644 index 0000000000..6274518d1a --- /dev/null +++ b/src/library/scala/collection/convert/WrapAsJava.scala @@ -0,0 +1,256 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import Wrappers._ + +trait WrapAsJava { + /** + * Implicitly converts a Scala Iterator to a Java Iterator. + * The returned Java Iterator is backed by the provided Scala + * Iterator and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Iterator was previously obtained from an implicit or + * explicit call of `asIterator(java.util.Iterator)` then the original + * Java Iterator will be returned. 
+ * + * @param i The Iterator to be converted. + * @return A Java Iterator view of the argument. + */ + implicit def asJavaIterator[A](it: Iterator[A]): ju.Iterator[A] = it match { + case JIteratorWrapper(wrapped) => wrapped.asInstanceOf[ju.Iterator[A]] + case _ => IteratorWrapper(it) + } + + /** + * Implicitly converts a Scala Iterator to a Java Enumeration. + * The returned Java Enumeration is backed by the provided Scala + * Iterator and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Iterator was previously obtained from an implicit or + * explicit call of `asIterator(java.util.Enumeration)` then the + * original Java Enumeration will be returned. + * + * @param i The Iterator to be converted. + * @return A Java Enumeration view of the argument. + */ + implicit def asJavaEnumeration[A](it: Iterator[A]): ju.Enumeration[A] = it match { + case JEnumerationWrapper(wrapped) => wrapped.asInstanceOf[ju.Enumeration[A]] + case _ => IteratorWrapper(it) + } + + /** + * Implicitly converts a Scala Iterable to a Java Iterable. + * The returned Java Iterable is backed by the provided Scala + * Iterable and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Iterable was previously obtained from an implicit or + * explicit call of `asIterable(java.lang.Iterable)` then the original + * Java Iterable will be returned. + * + * @param i The Iterable to be converted. + * @return A Java Iterable view of the argument. + */ + implicit def asJavaIterable[A](i: Iterable[A]): jl.Iterable[A] = i match { + case JIterableWrapper(wrapped) => wrapped.asInstanceOf[jl.Iterable[A]] + case _ => IterableWrapper(i) + } + + /** + * Implicitly converts a Scala Iterable to an immutable Java + * Collection. + * + * If the Scala Iterable was previously obtained from an implicit or + * explicit call of `asSizedIterable(java.util.Collection)` then the original + * Java Collection will be returned. + * + * @param i The SizedIterable to be converted. + * @return A Java Collection view of the argument. + */ + implicit def asJavaCollection[A](it: Iterable[A]): ju.Collection[A] = it match { + case JCollectionWrapper(wrapped) => wrapped.asInstanceOf[ju.Collection[A]] + case _ => new IterableWrapper(it) + } + + /** + * Implicitly converts a Scala mutable Buffer to a Java List. + * The returned Java List is backed by the provided Scala + * Buffer and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Buffer was previously obtained from an implicit or + * explicit call of `asBuffer(java.util.List)` then the original + * Java List will be returned. + * + * @param b The Buffer to be converted. + * @return A Java List view of the argument. + */ + implicit def bufferAsJavaList[A](b: mutable.Buffer[A]): ju.List[A] = b match { + case JListWrapper(wrapped) => wrapped + case _ => new MutableBufferWrapper(b) + } + + /** + * Implicitly converts a Scala mutable Seq to a Java List. + * The returned Java List is backed by the provided Scala + * Seq and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Seq was previously obtained from an implicit or + * explicit call of `asSeq(java.util.List)` then the original + * Java List will be returned. + * + * @param b The Seq to be converted. 
+ * @return A Java List view of the argument. + */ + implicit def mutableSeqAsJavaList[A](seq: mutable.Seq[A]): ju.List[A] = seq match { + case JListWrapper(wrapped) => wrapped + case _ => new MutableSeqWrapper(seq) + } + + /** + * Implicitly converts a Scala Seq to a Java List. + * The returned Java List is backed by the provided Scala + * Seq and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Seq was previously obtained from an implicit or + * explicit call of `asSeq(java.util.List)` then the original + * Java List will be returned. + * + * @param b The Seq to be converted. + * @return A Java List view of the argument. + */ + implicit def seqAsJavaList[A](seq: Seq[A]): ju.List[A] = seq match { + case JListWrapper(wrapped) => wrapped.asInstanceOf[ju.List[A]] + case _ => new SeqWrapper(seq) + } + + /** + * Implicitly converts a Scala mutable Set to a Java Set. + * The returned Java Set is backed by the provided Scala + * Set and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Set was previously obtained from an implicit or + * explicit call of `asSet(java.util.Set)` then the original + * Java Set will be returned. + * + * @param s The Set to be converted. + * @return A Java Set view of the argument. + */ + implicit def mutableSetAsJavaSet[A](s: mutable.Set[A]): ju.Set[A] = s match { + case JSetWrapper(wrapped) => wrapped + case _ => new MutableSetWrapper(s) + } + + /** + * Implicitly converts a Scala Set to a Java Set. + * The returned Java Set is backed by the provided Scala + * Set and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Set was previously obtained from an implicit or + * explicit call of asSet(java.util.Set) then the original + * Java Set will be returned. + * + * @param s The Set to be converted. + * @return A Java Set view of the argument. + */ + implicit def setAsJavaSet[A](s: Set[A]): ju.Set[A] = s match { + case JSetWrapper(wrapped) => wrapped + case _ => new SetWrapper(s) + } + + /** + * Implicitly converts a Scala mutable Map to a Java Map. + * The returned Java Map is backed by the provided Scala + * Map and any side-effects of using it via the Java interface will + * be visible via the Scala interface and vice versa. + * + * If the Scala Map was previously obtained from an implicit or + * explicit call of `asMap(java.util.Map)` then the original + * Java Map will be returned. + * + * @param m The Map to be converted. + * @return A Java Map view of the argument. + */ + implicit def mutableMapAsJavaMap[A, B](m: mutable.Map[A, B]): ju.Map[A, B] = m match { + //case JConcurrentMapWrapper(wrapped) => wrapped + case JMapWrapper(wrapped) => wrapped + case _ => new MutableMapWrapper(m) + } + + /** + * Implicitly converts a Scala mutable `Map` to a Java `Dictionary`. + * + * The returned Java `Dictionary` is backed by the provided Scala + * `Dictionary` and any side-effects of using it via the Java interface + * will be visible via the Scala interface and vice versa. + * + * If the Scala `Dictionary` was previously obtained from an implicit or + * explicit call of `asMap(java.util.Dictionary)` then the original + * Java Dictionary will be returned. + * + * @param m The `Map` to be converted. + * @return A Java `Dictionary` view of the argument. 
+ */ + implicit def asJavaDictionary[A, B](m: mutable.Map[A, B]): ju.Dictionary[A, B] = m match { + //case JConcurrentMapWrapper(wrapped) => wrapped + case JDictionaryWrapper(wrapped) => wrapped + case _ => new DictionaryWrapper(m) + } + + /** + * Implicitly converts a Scala `Map` to a Java `Map`. + * + * The returned Java `Map` is backed by the provided Scala `Map` and + * any side-effects of using it via the Java interface will be visible + * via the Scala interface and vice versa. + * + * If the Scala `Map` was previously obtained from an implicit or + * explicit call of `asMap(java.util.Map)` then the original + * Java `Map` will be returned. + * + * @param m The `Map` to be converted. + * @return A Java `Map` view of the argument. + */ + implicit def mapAsJavaMap[A, B](m: Map[A, B]): ju.Map[A, B] = m match { + //case JConcurrentMapWrapper(wrapped) => wrapped + case JMapWrapper(wrapped) => wrapped.asInstanceOf[ju.Map[A, B]] + case _ => new MapWrapper(m) + } + + /** + * Implicitly converts a Scala mutable `ConcurrentMap` to a Java + * `ConcurrentMap`. + * + * The returned Java `ConcurrentMap` is backed by the provided Scala + * `ConcurrentMap` and any side-effects of using it via the Java interface + * will be visible via the Scala interface and vice versa. + * + * If the Scala `ConcurrentMap` was previously obtained from an implicit or + * explicit call of `asConcurrentMap(java.util.concurrect.ConcurrentMap)` + * then the original Java ConcurrentMap will be returned. + * + * @param m The `ConcurrentMap` to be converted. + * @return A Java `ConcurrentMap` view of the argument. + */ + implicit def asJavaConcurrentMap[A, B](m: mutable.ConcurrentMap[A, B]): juc.ConcurrentMap[A, B] = m match { + case JConcurrentMapWrapper(wrapped) => wrapped + case _ => new ConcurrentMapWrapper(m) + } +} + +object WrapAsJava extends WrapAsJava { } diff --git a/src/library/scala/collection/convert/WrapAsScala.scala b/src/library/scala/collection/convert/WrapAsScala.scala new file mode 100644 index 0000000000..02b58f55a4 --- /dev/null +++ b/src/library/scala/collection/convert/WrapAsScala.scala @@ -0,0 +1,193 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import Wrappers._ + +trait WrapAsScala { + /** + * Implicitly converts a Java `Iterator` to a Scala `Iterator`. + * + * The returned Scala `Iterator` is backed by the provided Java `Iterator` + * and any side-effects of using it via the Scala interface will be visible + * via the Java interface and vice versa. + * + * If the Java `Iterator` was previously obtained from an implicit or + * explicit call of `asIterator(scala.collection.Iterator)` then the + * original Scala `Iterator` will be returned. + * + * @param i The `Iterator` to be converted. + * @return A Scala `Iterator` view of the argument. + */ + implicit def asScalaIterator[A](it: ju.Iterator[A]): Iterator[A] = it match { + case IteratorWrapper(wrapped) => wrapped + case _ => JIteratorWrapper(it) + } + + /** + * Implicitly converts a Java Enumeration to a Scala Iterator. + * The returned Scala Iterator is backed by the provided Java + * Enumeration and any side-effects of using it via the Scala interface will + * be visible via the Java interface and vice versa. 
+ * + * If the Java Enumeration was previously obtained from an implicit or + * explicit call of `enumerationAsScalaIterator(scala.collection.Iterator)` + * then the original Scala Iterator will be returned. + * + * @param i The Enumeration to be converted. + * @return A Scala Iterator view of the argument. + */ + implicit def enumerationAsScalaIterator[A](i: ju.Enumeration[A]): Iterator[A] = i match { + case IteratorWrapper(wrapped) => wrapped + case _ => JEnumerationWrapper(i) + } + + /** + * Implicitly converts a Java `Iterable` to a Scala `Iterable`. + * + * The returned Scala `Iterable` is backed by the provided Java `Iterable` + * and any side-effects of using it via the Scala interface will be visible + * via the Java interface and vice versa. + * + * If the Java `Iterable` was previously obtained from an implicit or + * explicit call of `iterableAsScalaIterable(scala.collection.Iterable)` + * then the original Scala Iterable will be returned. + * + * @param i The Iterable to be converted. + * @return A Scala Iterable view of the argument. + */ + implicit def iterableAsScalaIterable[A](i: jl.Iterable[A]): Iterable[A] = i match { + case IterableWrapper(wrapped) => wrapped + case _ => JIterableWrapper(i) + } + + /** + * Implicitly converts a Java `Collection` to an Scala `Iterable`. + * + * If the Java `Collection` was previously obtained from an implicit or + * explicit call of `collectionAsScalaIterable(scala.collection.SizedIterable)` + * then the original Scala `Iterable` will be returned. + * + * @param i The Collection to be converted. + * @return A Scala Iterable view of the argument. + */ + implicit def collectionAsScalaIterable[A](i: ju.Collection[A]): Iterable[A] = i match { + case IterableWrapper(wrapped) => wrapped + case _ => JCollectionWrapper(i) + } + + /** + * Implicitly converts a Java `List` to a Scala mutable `Buffer`. + * + * The returned Scala `Buffer` is backed by the provided Java `List` + * and any side-effects of using it via the Scala interface will + * be visible via the Java interface and vice versa. + * + * If the Java `List` was previously obtained from an implicit or + * explicit call of `asScalaBuffer(scala.collection.mutable.Buffer)` + * then the original Scala `Buffer` will be returned. + * + * @param l The `List` to be converted. + * @return A Scala mutable `Buffer` view of the argument. + */ + implicit def asScalaBuffer[A](l: ju.List[A]): mutable.Buffer[A] = l match { + case MutableBufferWrapper(wrapped) => wrapped + case _ =>new JListWrapper(l) + } + + /** + * Implicitly converts a Java Set to a Scala mutable Set. + * The returned Scala Set is backed by the provided Java + * Set and any side-effects of using it via the Scala interface will + * be visible via the Java interface and vice versa. + * + * If the Java Set was previously obtained from an implicit or + * explicit call of `asScalaSet(scala.collection.mutable.Set)` then + * the original Scala Set will be returned. + * + * @param s The Set to be converted. + * @return A Scala mutable Set view of the argument. + */ + implicit def asScalaSet[A](s: ju.Set[A]): mutable.Set[A] = s match { + case MutableSetWrapper(wrapped) => wrapped + case _ =>new JSetWrapper(s) + } + + /** + * Implicitly converts a Java `Map` to a Scala mutable `Map`. + * + * The returned Scala `Map` is backed by the provided Java `Map` and any + * side-effects of using it via the Scala interface will be visible via + * the Java interface and vice versa. 
+ * + * If the Java `Map` was previously obtained from an implicit or + * explicit call of `mapAsScalaMap(scala.collection.mutable.Map)` then + * the original Scala Map will be returned. + * + * @param m The Map to be converted. + * @return A Scala mutable Map view of the argument. + */ + implicit def mapAsScalaMap[A, B](m: ju.Map[A, B]): mutable.Map[A, B] = m match { + //case ConcurrentMapWrapper(wrapped) => wrapped + case MutableMapWrapper(wrapped) => wrapped + case _ => new JMapWrapper(m) + } + + /** + * Implicitly converts a Java ConcurrentMap to a Scala mutable ConcurrentMap. + * The returned Scala ConcurrentMap is backed by the provided Java + * ConcurrentMap and any side-effects of using it via the Scala interface will + * be visible via the Java interface and vice versa. + * + * If the Java ConcurrentMap was previously obtained from an implicit or + * explicit call of `asConcurrentMap(scala.collection.mutable.ConcurrentMap)` + * then the original Scala ConcurrentMap will be returned. + * + * @param m The ConcurrentMap to be converted. + * @return A Scala mutable ConcurrentMap view of the argument. + */ + implicit def asScalaConcurrentMap[A, B](m: juc.ConcurrentMap[A, B]): mutable.ConcurrentMap[A, B] = m match { + case cmw: ConcurrentMapWrapper[a, b] => cmw.underlying + case _ => new JConcurrentMapWrapper(m) + } + + /** + * Implicitly converts a Java `Dictionary` to a Scala mutable + * `Map[String, String]`. + * + * The returned Scala `Map[String, String]` is backed by the provided Java + * `Dictionary` and any side-effects of using it via the Scala interface + * will be visible via the Java interface and vice versa. + * + * @param m The Dictionary to be converted. + * @return A Scala mutable Map[String, String] view of the argument. + */ + implicit def dictionaryAsScalaMap[A, B](p: ju.Dictionary[A, B]): mutable.Map[A, B] = p match { + case DictionaryWrapper(wrapped) => wrapped + case _ => new JDictionaryWrapper(p) + } + + /** + * Implicitly converts a Java `Properties` to a Scala `mutable Map[String, String]`. + * + * The returned Scala `Map[String, String]` is backed by the provided Java + * `Properties` and any side-effects of using it via the Scala interface + * will be visible via the Java interface and vice versa. + * + * @param m The Properties to be converted. + * @return A Scala mutable Map[String, String] view of the argument. + */ + implicit def propertiesAsScalaMap(p: ju.Properties): mutable.Map[String, String] = p match { + case _ => new JPropertiesWrapper(p) + } +} + +object WrapAsScala extends WrapAsScala { } diff --git a/src/library/scala/collection/convert/Wrappers.scala b/src/library/scala/collection/convert/Wrappers.scala new file mode 100644 index 0000000000..8136e462cb --- /dev/null +++ b/src/library/scala/collection/convert/Wrappers.scala @@ -0,0 +1,422 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection +package convert + +import java.{ lang => jl, util => ju }, java.util.{ concurrent => juc } +import WrapAsScala._ +import WrapAsJava._ + +/** Don't put the implementations in the same scope as the implicits + * which utilize them, or they will stow away into every scope which + * extends one of those implementations. See SI-5580. 
+ */ +private[collection] trait Wrappers { + trait IterableWrapperTrait[A] extends ju.AbstractCollection[A] { + val underlying: Iterable[A] + def size = underlying.size + override def iterator = IteratorWrapper(underlying.iterator) + override def isEmpty = underlying.isEmpty + } + + case class IteratorWrapper[A](underlying: Iterator[A]) extends ju.Iterator[A] with ju.Enumeration[A] { + def hasNext = underlying.hasNext + def next() = underlying.next + def hasMoreElements = underlying.hasNext + def nextElement() = underlying.next + def remove() = throw new UnsupportedOperationException + } + + class ToIteratorWrapper[A](underlying : Iterator[A]) { + def asJava = new IteratorWrapper(underlying) + } + + case class JIteratorWrapper[A](underlying: ju.Iterator[A]) extends AbstractIterator[A] with Iterator[A] { + def hasNext = underlying.hasNext + def next() = underlying.next + } + + case class JEnumerationWrapper[A](underlying: ju.Enumeration[A]) extends AbstractIterator[A] with Iterator[A] { + def hasNext = underlying.hasMoreElements + def next() = underlying.nextElement + } + + case class IterableWrapper[A](underlying: Iterable[A]) extends ju.AbstractCollection[A] with IterableWrapperTrait[A] { } + + case class JIterableWrapper[A](underlying: jl.Iterable[A]) extends AbstractIterable[A] with Iterable[A] { + def iterator = underlying.iterator + def newBuilder[B] = new mutable.ArrayBuffer[B] + } + + case class JCollectionWrapper[A](underlying: ju.Collection[A]) extends AbstractIterable[A] with Iterable[A] { + def iterator = underlying.iterator + override def size = underlying.size + override def isEmpty = underlying.isEmpty + def newBuilder[B] = new mutable.ArrayBuffer[B] + } + + case class SeqWrapper[A](underlying: Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { + def get(i: Int) = underlying(i) + } + + case class MutableSeqWrapper[A](underlying: mutable.Seq[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { + val p = underlying(i) + underlying(i) = elem + p + } + } + + case class MutableBufferWrapper[A](underlying: mutable.Buffer[A]) extends ju.AbstractList[A] with IterableWrapperTrait[A] { + def get(i: Int) = underlying(i) + override def set(i: Int, elem: A) = { val p = underlying(i); underlying(i) = elem; p } + override def add(elem: A) = { underlying append elem; true } + override def remove(i: Int) = underlying remove i + } + + case class JListWrapper[A](val underlying: ju.List[A]) extends mutable.AbstractBuffer[A] with mutable.Buffer[A] { + def length = underlying.size + override def isEmpty = underlying.isEmpty + override def iterator: Iterator[A] = underlying.iterator + def apply(i: Int) = underlying.get(i) + def update(i: Int, elem: A) = underlying.set(i, elem) + def +=:(elem: A) = { underlying.subList(0, 0) add elem; this } + def +=(elem: A): this.type = { underlying add elem; this } + def insertAll(i: Int, elems: Traversable[A]) = { + val ins = underlying.subList(0, i) + elems.seq.foreach(ins.add(_)) + } + def remove(i: Int) = underlying.remove(i) + def clear() = underlying.clear() + def result = this + } + + class SetWrapper[A](underlying: Set[A]) extends ju.AbstractSet[A] { + self => + def size = underlying.size + def iterator = new ju.Iterator[A] { + val ui = underlying.iterator + var prev: Option[A] = None + def hasNext = ui.hasNext + def next = { val e = ui.next; prev = Some(e); e } + def remove = prev match { + case Some(e) => + underlying match { + case ms: mutable.Set[a] => + 
ms remove e + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + + case class MutableSetWrapper[A](underlying: mutable.Set[A]) extends SetWrapper[A](underlying) { + override def add(elem: A) = { + val sz = underlying.size + underlying += elem + sz < underlying.size + } + override def remove(elem: AnyRef) = + try underlying remove elem.asInstanceOf[A] + catch { case ex: ClassCastException => false } + override def clear() = underlying.clear() + } + + case class JSetWrapper[A](underlying: ju.Set[A]) extends mutable.AbstractSet[A] with mutable.Set[A] with mutable.SetLike[A, JSetWrapper[A]] { + + override def size = underlying.size + + def iterator = underlying.iterator + + def contains(elem: A): Boolean = underlying.contains(elem) + + def +=(elem: A): this.type = { underlying add elem; this } + def -=(elem: A): this.type = { underlying remove elem; this } + + override def add(elem: A): Boolean = underlying add elem + override def remove(elem: A): Boolean = underlying remove elem + override def clear() = underlying.clear() + + override def empty = JSetWrapper(new ju.HashSet[A]) + } + + class MapWrapper[A, B](underlying: Map[A, B]) extends ju.AbstractMap[A, B] { self => + override def size = underlying.size + + override def get(key: AnyRef): B = try { + underlying get key.asInstanceOf[A] match { + case None => null.asInstanceOf[B] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[B] + } + + override def entrySet: ju.Set[ju.Map.Entry[A, B]] = new ju.AbstractSet[ju.Map.Entry[A, B]] { + def size = self.size + + def iterator = new ju.Iterator[ju.Map.Entry[A, B]] { + val ui = underlying.iterator + var prev : Option[A] = None + + def hasNext = ui.hasNext + + def next() = { + val (k, v) = ui.next + prev = Some(k) + new ju.Map.Entry[A, B] { + def getKey = k + def getValue = v + def setValue(v1 : B) = self.put(k, v1) + override def hashCode = k.hashCode + v.hashCode + override def equals(other: Any) = other match { + case e: ju.Map.Entry[_, _] => k == e.getKey && v == e.getValue + case _ => false + } + } + } + + def remove() { + prev match { + case Some(k) => + underlying match { + case mm: mutable.Map[a, _] => + mm remove k + prev = None + case _ => + throw new UnsupportedOperationException("remove") + } + case _ => + throw new IllegalStateException("next must be called at least once before remove") + } + } + } + } + } + + case class MutableMapWrapper[A, B](underlying: mutable.Map[A, B]) extends MapWrapper[A, B](underlying) { + override def put(k: A, v: B) = underlying.put(k, v) match { + case Some(v1) => v1 + case None => null.asInstanceOf[B] + } + + override def remove(k: AnyRef): B = try { + underlying remove k.asInstanceOf[A] match { + case None => null.asInstanceOf[B] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[B] + } + + override def clear() = underlying.clear() + } + + trait JMapWrapperLike[A, B, +Repr <: mutable.MapLike[A, B, Repr] with mutable.Map[A, B]] extends mutable.Map[A, B] with mutable.MapLike[A, B, Repr] { + def underlying: ju.Map[A, B] + + override def size = underlying.size + + def get(k: A) = { + val v = underlying get k + if (v != null) + Some(v) + else if (underlying containsKey k) + Some(null.asInstanceOf[B]) + else + None + } + + def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } + def -=(key: A): this.type = { underlying remove key; this } + 
+ override def put(k: A, v: B): Option[B] = { + val r = underlying.put(k, v) + if (r != null) Some(r) else None + } + + override def update(k: A, v: B) { underlying.put(k, v) } + + override def remove(k: A): Option[B] = { + val r = underlying remove k + if (r != null) Some(r) else None + } + + def iterator: Iterator[(A, B)] = new AbstractIterator[(A, B)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { val e = ui.next(); (e.getKey, e.getValue) } + } + + override def clear() = underlying.clear() + + override def empty: Repr = null.asInstanceOf[Repr] + } + + case class JMapWrapper[A, B](val underlying : ju.Map[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JMapWrapper[A, B]] { + override def empty = JMapWrapper(new ju.HashMap[A, B]) + } + + class ConcurrentMapWrapper[A, B](override val underlying: mutable.ConcurrentMap[A, B]) extends MutableMapWrapper[A, B](underlying) with juc.ConcurrentMap[A, B] { + + def putIfAbsent(k: A, v: B) = underlying.putIfAbsent(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[B] + } + + def remove(k: AnyRef, v: AnyRef) = try { + underlying.remove(k.asInstanceOf[A], v.asInstanceOf[B]) + } catch { + case ex: ClassCastException => + false + } + + def replace(k: A, v: B): B = underlying.replace(k, v) match { + case Some(v) => v + case None => null.asInstanceOf[B] + } + + def replace(k: A, oldval: B, newval: B) = underlying.replace(k, oldval, newval) + } + + case class JConcurrentMapWrapper[A, B](val underlying: juc.ConcurrentMap[A, B]) extends mutable.AbstractMap[A, B] with JMapWrapperLike[A, B, JConcurrentMapWrapper[A, B]] with mutable.ConcurrentMap[A, B] { + override def get(k: A) = { + val v = underlying get k + if (v != null) Some(v) + else None + } + + override def empty = new JConcurrentMapWrapper(new juc.ConcurrentHashMap[A, B]) + + def putIfAbsent(k: A, v: B): Option[B] = { + val r = underlying.putIfAbsent(k, v) + if (r != null) Some(r) else None + } + + def remove(k: A, v: B): Boolean = underlying.remove(k, v) + + def replace(k: A, v: B): Option[B] = { + val prev = underlying.replace(k, v) + if (prev != null) Some(prev) else None + } + + def replace(k: A, oldvalue: B, newvalue: B): Boolean = + underlying.replace(k, oldvalue, newvalue) + } + + case class DictionaryWrapper[A, B](underlying: mutable.Map[A, B]) extends ju.Dictionary[A, B] { + def size: Int = underlying.size + def isEmpty: Boolean = underlying.isEmpty + def keys: ju.Enumeration[A] = asJavaEnumeration(underlying.keysIterator) + def elements: ju.Enumeration[B] = asJavaEnumeration(underlying.valuesIterator) + def get(key: AnyRef) = try { + underlying get key.asInstanceOf[A] match { + case None => null.asInstanceOf[B] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[B] + } + def put(key: A, value: B): B = underlying.put(key, value) match { + case Some(v) => v + case None => null.asInstanceOf[B] + } + override def remove(key: AnyRef) = try { + underlying remove key.asInstanceOf[A] match { + case None => null.asInstanceOf[B] + case Some(v) => v + } + } catch { + case ex: ClassCastException => null.asInstanceOf[B] + } + } + + case class JDictionaryWrapper[A, B](underlying: ju.Dictionary[A, B]) extends mutable.AbstractMap[A, B] with mutable.Map[A, B] { + override def size: Int = underlying.size + + def get(k: A) = { + val v = underlying get k + if (v != null) Some(v) else None + } + + def +=(kv: (A, B)): this.type = { underlying.put(kv._1, kv._2); this } + def -=(key: A): this.type = { 
underlying remove key; this } + + override def put(k: A, v: B): Option[B] = { + val r = underlying.put(k, v) + if (r != null) Some(r) else None + } + + override def update(k: A, v: B) { underlying.put(k, v) } + + override def remove(k: A): Option[B] = { + val r = underlying remove k + if (r != null) Some(r) else None + } + + def iterator = enumerationAsScalaIterator(underlying.keys) map (k => (k, underlying get k)) + + override def clear() = underlying.clear() + } + + case class JPropertiesWrapper(underlying: ju.Properties) extends mutable.AbstractMap[String, String] + with mutable.Map[String, String] + with mutable.MapLike[String, String, JPropertiesWrapper] { + + override def size = underlying.size + + def get(k: String) = { + val v = underlying get k + if (v != null) Some(v.asInstanceOf[String]) else None + } + + def +=(kv: (String, String)): this.type = { underlying.put(kv._1, kv._2); this } + def -=(key: String): this.type = { underlying remove key; this } + + override def put(k: String, v: String): Option[String] = { + val r = underlying.put(k, v) + if (r != null) Some(r.asInstanceOf[String]) else None + } + + override def update(k: String, v: String) { underlying.put(k, v) } + + override def remove(k: String): Option[String] = { + val r = underlying remove k + if (r != null) Some(r.asInstanceOf[String]) else None + } + + def iterator: Iterator[(String, String)] = new AbstractIterator[(String, String)] { + val ui = underlying.entrySet.iterator + def hasNext = ui.hasNext + def next() = { + val e = ui.next() + (e.getKey.asInstanceOf[String], e.getValue.asInstanceOf[String]) + } + } + + override def clear() = underlying.clear() + + override def empty = JPropertiesWrapper(new ju.Properties) + + def getProperty(key: String) = underlying.getProperty(key) + + def getProperty(key: String, defaultValue: String) = + underlying.getProperty(key, defaultValue) + + def setProperty(key: String, value: String) = + underlying.setProperty(key, value) + } +} + +object Wrappers extends Wrappers diff --git a/src/library/scala/collection/convert/package.scala b/src/library/scala/collection/convert/package.scala new file mode 100644 index 0000000000..2f8bca1e1f --- /dev/null +++ b/src/library/scala/collection/convert/package.scala @@ -0,0 +1,18 @@ +/* __ *\ +** ________ ___ / / ___ Scala API ** +** / __/ __// _ | / / / _ | (c) 2006-2011, LAMP/EPFL ** +** __\ \/ /__/ __ |/ /__/ __ | http://www.scala-lang.org/ ** +** /____/\___/_/ |_/____/_/ | | ** +** |/ ** +\* */ + +package scala.collection + +package object convert { + val decorateAsJava = new DecorateAsJava { } + val decorateAsScala = new DecorateAsScala { } + val decorateAll = new DecorateAsJava with DecorateAsScala { } + val wrapAsJava = new WrapAsJava { } + val wrapAsScala = new WrapAsScala { } + val wrapAll = new WrapAsJava with WrapAsScala { } +} diff --git a/src/library/scala/collection/mutable/WeakHashMap.scala b/src/library/scala/collection/mutable/WeakHashMap.scala index 89d7c7a695..4e09755acf 100644 --- a/src/library/scala/collection/mutable/WeakHashMap.scala +++ b/src/library/scala/collection/mutable/WeakHashMap.scala @@ -6,14 +6,11 @@ ** |/ ** \* */ - - package scala.collection package mutable -import JavaConversions._ import generic._ - +import convert.Wrappers._ /** A hash map with references to entries which are weakly reachable. Entries are * removed from this map when the key is no longer (strongly) referenced. 
This class wraps diff --git a/test/files/neg/saferJavaConversions.check b/test/files/neg/saferJavaConversions.check new file mode 100644 index 0000000000..0e53d2c437 --- /dev/null +++ b/test/files/neg/saferJavaConversions.check @@ -0,0 +1,6 @@ +saferJavaConversions.scala:13: error: type mismatch; + found : String("a") + required: Foo + val v = map.get("a") // now this is a type error + ^ +one error found diff --git a/test/files/neg/saferJavaConversions.scala b/test/files/neg/saferJavaConversions.scala new file mode 100644 index 0000000000..f0611204e6 --- /dev/null +++ b/test/files/neg/saferJavaConversions.scala @@ -0,0 +1,20 @@ + +case class Foo(s: String) + +object Test { + def f1 = { + import scala.collection.JavaConversions._ + val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") + val v = map.get("a") // should be a type error, actually returns null + } + def f2 = { + import scala.collection.convert.wrapAsScala._ + val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") + val v = map.get("a") // now this is a type error + } + def f3 = { + import scala.collection.convert.wrapAsJava._ + val map: Map[Foo, String] = Map(Foo("a") -> "a", Foo("b") -> "b") + val v = map.get("a") + } +} diff --git a/test/files/neg/t5580a.check b/test/files/neg/t5580a.check new file mode 100644 index 0000000000..50a31857d5 --- /dev/null +++ b/test/files/neg/t5580a.check @@ -0,0 +1,6 @@ +t5580a.scala:9: error: polymorphic expression cannot be instantiated to expected type; + found : [A]scala.collection.mutable.Set[A] + required: scala.collection.mutable.Map[bar,scala.collection.mutable.Set[bar]] + if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set()) + ^ +one error found diff --git a/test/files/neg/t5580a.scala b/test/files/neg/t5580a.scala new file mode 100644 index 0000000000..742f0e85ea --- /dev/null +++ b/test/files/neg/t5580a.scala @@ -0,0 +1,11 @@ +import scala.collection.mutable.WeakHashMap + +class bar{ } +class foo{ + val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]() + + def test={ + val tmp:bar=null + if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set()) + } +} diff --git a/test/files/pos/t5580b.scala b/test/files/pos/t5580b.scala new file mode 100644 index 0000000000..d5a4a0a2b2 --- /dev/null +++ b/test/files/pos/t5580b.scala @@ -0,0 +1,19 @@ +/** It's a pos test because it does indeed compile, + * not so much because I'm glad it does. Testing + * that error messages created and discarded during + * implicit search don't blow it up. + */ + +import scala.collection.mutable.WeakHashMap +import scala.collection.JavaConversions._ + +class bar { } + +class foo { + val map = WeakHashMap[AnyRef, collection.mutable.Map[bar, collection.mutable.Set[bar]]]() + + def test={ + val tmp:bar=null + if (map.get(tmp).isEmpty) map.put(tmp,collection.mutable.Set()) + } +} -- cgit v1.2.3 From c535aec63f6e30b0b689ad60b1dd2f1b78c66039 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 21 Mar 2012 14:12:25 -0700 Subject: An illustrative delayedInit test. 
--- test/files/run/delay-bad.check | 47 +++++++++++++++++++++++++ test/files/run/delay-bad.scala | 77 +++++++++++++++++++++++++++++++++++++++++ test/files/run/delay-good.check | 41 ++++++++++++++++++++++ test/files/run/delay-good.scala | 77 +++++++++++++++++++++++++++++++++++++++++ 4 files changed, 242 insertions(+) create mode 100644 test/files/run/delay-bad.check create mode 100644 test/files/run/delay-bad.scala create mode 100644 test/files/run/delay-good.check create mode 100644 test/files/run/delay-good.scala (limited to 'test/files') diff --git a/test/files/run/delay-bad.check b/test/files/run/delay-bad.check new file mode 100644 index 0000000000..9d9c828a03 --- /dev/null +++ b/test/files/run/delay-bad.check @@ -0,0 +1,47 @@ + + +// new C { } +-A -B -C + +// new C { 5 } +-A -B -C + A+ B+ C+ + +// new D() +-A -B -C -D + A+ B+ C+ D+ + +// new D() { } +-A -B -C -D + A+ B+ C+ D+ + +// new D() { val x = 5 } +-A -B -C -D + A+ B+ C+ D+ + A+ B+ C+ D+ + +// new { val x = 5 } with D() +-A -B -C -D + A+ B+ C+ D+ + +// new E() { val x = 5 } +-A -B -C -D + A+ B+ C+ D+ E+ -E + A+ B+ C+ D+ E+ + A+ B+ C+ D+ E+ + +// new { val x = 5 } with E() +-A -B -C -D + A+ B+ C+ D+ E+ -E + A+ B+ C+ D+ E+ + +// new { val x = 5 } with E() { } +-A -B -C -D + A+ B+ C+ D+ E+ -E + A+ B+ C+ D+ E+ + +// new { val x = 5 } with E() { 5 } +-A -B -C -D + A+ B+ C+ D+ E+ -E + A+ B+ C+ D+ E+ + A+ B+ C+ D+ E+ diff --git a/test/files/run/delay-bad.scala b/test/files/run/delay-bad.scala new file mode 100644 index 0000000000..43acc1ea3d --- /dev/null +++ b/test/files/run/delay-bad.scala @@ -0,0 +1,77 @@ +trait A extends DelayedInit +{ + print("-A") + + def delayedInit(body: => Unit) = { + body + postConstructionCode + } + def postConstructionCode: Unit = { + print("\n A+") + } +} +trait B extends A { + print(" -B") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" B+") + } +} + +trait C extends B { + print(" -C") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" C+") + } +} + +class D() extends C { + print(" -D") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" D+") + } +} +class E() extends D() { + print(" -E") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" E+") + } +} + +object Test { + def p(msg: String) = println("\n\n// " + msg) + + def main(args: Array[String]) { + val f: A => Unit = _ => () + + p("new C { }") + f(new C { }) + p("new C { 5 }") + f(new C { 5 }) + + p("new D()") + f(new D()) + p("new D() { }") + f(new D() { }) + + p("new D() { val x = 5 }") + f(new D() { val x = 5 }) + p("new { val x = 5 } with D()") + f(new { val x = 5 } with D()) + + p("new E() { val x = 5 }") + f(new E() { val x = 5 }) + p("new { val x = 5 } with E()") + f(new { val x = 5 } with E()) + + p("new { val x = 5 } with E() { }") + f(new { val x = 5 } with E() { }) + p("new { val x = 5 } with E() { 5 }") + f(new { val x = 5 } with E() { 5 }) + + println("") + } +} diff --git a/test/files/run/delay-good.check b/test/files/run/delay-good.check new file mode 100644 index 0000000000..8eb04c7cff --- /dev/null +++ b/test/files/run/delay-good.check @@ -0,0 +1,41 @@ + + +// new C { } +-A -B -C + A+ B+ C+ + +// new C { 5 } +-A -B -C + A+ B+ C+ + +// new D() +-A -B -C -D + A+ B+ C+ D+ + +// new D() { } +-A -B -C -D + A+ B+ C+ D+ + +// new D() { val x = 5 } +-A -B -C -D + A+ B+ C+ D+ + +// new { val x = 5 } with D() +-A -B -C -D + A+ B+ C+ D+ + +// new E() { val x = 5 } +-A -B -C -D -E + A+ B+ C+ D+ E+ + +// new { 
val x = 5 } with E() +-A -B -C -D -E + A+ B+ C+ D+ E+ + +// new { val x = 5 } with E() { } +-A -B -C -D -E + A+ B+ C+ D+ E+ + +// new { val x = 5 } with E() { 5 } +-A -B -C -D -E + A+ B+ C+ D+ E+ diff --git a/test/files/run/delay-good.scala b/test/files/run/delay-good.scala new file mode 100644 index 0000000000..2e4487b92c --- /dev/null +++ b/test/files/run/delay-good.scala @@ -0,0 +1,77 @@ +trait A +{ + print("-A") + + def delayedInit(body: => Unit) = { + body + postConstructionCode + } + def postConstructionCode: Unit = { + print("\n A+") + } +} +trait B extends A { + print(" -B") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" B+") + } +} + +trait C extends B { + print(" -C") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" C+") + } +} + +class D() extends C { + print(" -D") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" D+") + } +} +class E() extends D() { + print(" -E") + override def postConstructionCode: Unit = { + super.postConstructionCode + print(" E+") + } +} + +object Test { + def p(msg: String) = println("\n\n// " + msg) + + def main(args: Array[String]) { + val f: A => Unit = _.postConstructionCode + + p("new C { }") + f(new C { }) + p("new C { 5 }") + f(new C { 5 }) + + p("new D()") + f(new D()) + p("new D() { }") + f(new D() { }) + + p("new D() { val x = 5 }") + f(new D() { val x = 5 }) + p("new { val x = 5 } with D()") + f(new { val x = 5 } with D()) + + p("new E() { val x = 5 }") + f(new E() { val x = 5 }) + p("new { val x = 5 } with E()") + f(new { val x = 5 } with E()) + + p("new { val x = 5 } with E() { }") + f(new { val x = 5 } with E() { }) + p("new { val x = 5 } with E() { 5 }") + f(new { val x = 5 } with E() { 5 }) + + println("") + } +} -- cgit v1.2.3 From cd73ee6c0fce8b1010c0677697d91585734c2861 Mon Sep 17 00:00:00 2001 From: Iulian Dragos Date: Thu, 22 Mar 2012 18:25:05 +0100 Subject: Increased the timeout from 5s to 60s to make the presentation compiler shutdown test more resilient on slow machines. --- test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'test/files') diff --git a/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala index 53af84541a..cef9d2a5ed 100644 --- a/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala +++ b/test/files/presentation/shutdown-deadlock/ShutdownDeadlockTest.scala @@ -20,7 +20,7 @@ object Test extends InteractiveTest { } for ((j, i) <- jobs1.zipWithIndex) { - j.get(5000) match { + j.get(40000) match { case None => println(i + ": TIMEOUT") exit(1) // no need to delay the test any longer -- cgit v1.2.3 From fb788e03f6940ab1a3551d6a1f295821c118dc70 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 22 Mar 2012 12:44:25 -0700 Subject: Test case closes SI-4987. 
--- test/files/neg/t4987.check | 4 ++++ test/files/neg/t4987.scala | 2 ++ 2 files changed, 6 insertions(+) create mode 100644 test/files/neg/t4987.check create mode 100644 test/files/neg/t4987.scala (limited to 'test/files') diff --git a/test/files/neg/t4987.check b/test/files/neg/t4987.check new file mode 100644 index 0000000000..8d7344d27b --- /dev/null +++ b/test/files/neg/t4987.check @@ -0,0 +1,4 @@ +t4987.scala:2: error: constructor Foo2 in class Foo2 cannot be accessed in object Bar2 +object Bar2 { new Foo2(0, 0) } + ^ +one error found diff --git a/test/files/neg/t4987.scala b/test/files/neg/t4987.scala new file mode 100644 index 0000000000..e55acd4127 --- /dev/null +++ b/test/files/neg/t4987.scala @@ -0,0 +1,2 @@ +class Foo2 private (a: Int, b: Int) +object Bar2 { new Foo2(0, 0) } -- cgit v1.2.3 From 3db29dde051614d976bca92a1cdeb109c9c0ab01 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Thu, 22 Mar 2012 23:27:22 -0700 Subject: Test case for cause of previous commit's reversion. --- test/files/run/nonlocalreturn.check | 1 + test/files/run/nonlocalreturn.scala | 15 +++++++++++++++ 2 files changed, 16 insertions(+) create mode 100644 test/files/run/nonlocalreturn.check create mode 100644 test/files/run/nonlocalreturn.scala (limited to 'test/files') diff --git a/test/files/run/nonlocalreturn.check b/test/files/run/nonlocalreturn.check new file mode 100644 index 0000000000..aeb2d5e239 --- /dev/null +++ b/test/files/run/nonlocalreturn.check @@ -0,0 +1 @@ +Some(1) diff --git a/test/files/run/nonlocalreturn.scala b/test/files/run/nonlocalreturn.scala new file mode 100644 index 0000000000..3c1e7420ed --- /dev/null +++ b/test/files/run/nonlocalreturn.scala @@ -0,0 +1,15 @@ +object Test { + def wrap[K](body: => K): K = body + + def f(): Option[Int] = { + wrap({ return Some(1) ; None }) + } + + def main(args: Array[String]) { + println(f()) + } +} +// java.lang.ClassCastException: scala.Some cannot be cast to scala.None$ +// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5) +// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5) +// at Test$.wrap(nonlocalreturn.scala:2) -- cgit v1.2.3 From 91c5a3ba9a38e34617315c97876baed4e7eab671 Mon Sep 17 00:00:00 2001 From: Lukas Rytz Date: Fri, 23 Mar 2012 10:24:43 +0100 Subject: Undo the revert. 
reverts 3db29dde051614d976bca92a1cdeb109c9c0ab01 and 5af2bf54d21ac63236cd6e68586b2c38fa0f28c3 restores 19a48510c2e18430a35319c04dfe3bad7119f23f --- src/compiler/scala/reflect/internal/Trees.scala | 12 ++++++++---- test/files/run/nonlocalreturn.check | 1 - test/files/run/nonlocalreturn.scala | 15 --------------- 3 files changed, 8 insertions(+), 20 deletions(-) delete mode 100644 test/files/run/nonlocalreturn.check delete mode 100644 test/files/run/nonlocalreturn.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Trees.scala b/src/compiler/scala/reflect/internal/Trees.scala index ac2147d284..1a40e0105c 100644 --- a/src/compiler/scala/reflect/internal/Trees.scala +++ b/src/compiler/scala/reflect/internal/Trees.scala @@ -324,10 +324,14 @@ trait Trees extends api.Trees { self: SymbolTable => } class ChangeOwnerTraverser(val oldowner: Symbol, val newowner: Symbol) extends Traverser { - def changeOwner(tree: Tree) = { - if ((tree.isDef || tree.isInstanceOf[Function]) && - tree.symbol != NoSymbol && tree.symbol.owner == oldowner) - tree.symbol.owner = newowner + def changeOwner(tree: Tree) = tree match { + case Return(expr) => + if (tree.symbol == oldowner) + tree.symbol = newowner + case _: DefTree | _: Function => + if (tree.symbol != NoSymbol && tree.symbol.owner == oldowner) + tree.symbol.owner = newowner + case _ => } override def traverse(tree: Tree) { changeOwner(tree) diff --git a/test/files/run/nonlocalreturn.check b/test/files/run/nonlocalreturn.check deleted file mode 100644 index aeb2d5e239..0000000000 --- a/test/files/run/nonlocalreturn.check +++ /dev/null @@ -1 +0,0 @@ -Some(1) diff --git a/test/files/run/nonlocalreturn.scala b/test/files/run/nonlocalreturn.scala deleted file mode 100644 index 3c1e7420ed..0000000000 --- a/test/files/run/nonlocalreturn.scala +++ /dev/null @@ -1,15 +0,0 @@ -object Test { - def wrap[K](body: => K): K = body - - def f(): Option[Int] = { - wrap({ return Some(1) ; None }) - } - - def main(args: Array[String]) { - println(f()) - } -} -// java.lang.ClassCastException: scala.Some cannot be cast to scala.None$ -// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5) -// at Test$$anonfun$f$1.apply(nonlocalreturn.scala:5) -// at Test$.wrap(nonlocalreturn.scala:2) -- cgit v1.2.3 From d786f269834c89e6de4a6a90e7a9f22c583bc30a Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 22 Mar 2012 10:39:29 +0100 Subject: [vpm] avoid verifyerror: leave jump to tail-pos label the following commit deals with the fall-out in basicblocks (double closing of blocks in ignore mode) --- src/compiler/scala/tools/nsc/transform/TailCalls.scala | 9 +++++++-- test/files/run/virtpatmat_tailcalls_verifyerror.check | 1 + test/files/run/virtpatmat_tailcalls_verifyerror.flags | 1 + test/files/run/virtpatmat_tailcalls_verifyerror.scala | 13 +++++++++++++ 4 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 test/files/run/virtpatmat_tailcalls_verifyerror.check create mode 100644 test/files/run/virtpatmat_tailcalls_verifyerror.flags create mode 100644 test/files/run/virtpatmat_tailcalls_verifyerror.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/transform/TailCalls.scala b/src/compiler/scala/tools/nsc/transform/TailCalls.scala index ef76fe1b1c..9915f7e9fc 100644 --- a/src/compiler/scala/tools/nsc/transform/TailCalls.scala +++ b/src/compiler/scala/tools/nsc/transform/TailCalls.scala @@ -341,13 +341,18 @@ abstract class TailCalls extends Transform { else if (fun.symbol.isLabel && args.nonEmpty && args.tail.isEmpty && 
ctx.tailLabels(fun.symbol)) { // this is to detect tailcalls in translated matches // it's a one-argument call to a label that is in a tailposition and that looks like label(x) {x} - // thus, the argument to the call is in tailposition and we don't need to jump to the label, tail jump instead + // thus, the argument to the call is in tailposition val saved = ctx.tailPos ctx.tailPos = true debuglog("in tailpos label: "+ args.head) val res = transform(args.head) ctx.tailPos = saved - if (res ne args.head) res // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call + if (res ne args.head) { + // we tail-called -- TODO: shield from false-positives where we rewrite but don't tail-call + // must leave the jump to the original tailpos-label (fun)! + // there might be *a* tailcall *in* res, but it doesn't mean res *always* tailcalls + treeCopy.Apply(tree, fun, List(res)) + } else rewriteApply(fun, fun, Nil, args) } else rewriteApply(fun, fun, Nil, args) diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.check b/test/files/run/virtpatmat_tailcalls_verifyerror.check new file mode 100644 index 0000000000..c508d5366f --- /dev/null +++ b/test/files/run/virtpatmat_tailcalls_verifyerror.check @@ -0,0 +1 @@ +false diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.flags b/test/files/run/virtpatmat_tailcalls_verifyerror.flags new file mode 100644 index 0000000000..9769db9257 --- /dev/null +++ b/test/files/run/virtpatmat_tailcalls_verifyerror.flags @@ -0,0 +1 @@ + -Yvirtpatmat -Xexperimental diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.scala b/test/files/run/virtpatmat_tailcalls_verifyerror.scala new file mode 100644 index 0000000000..1ee613f09e --- /dev/null +++ b/test/files/run/virtpatmat_tailcalls_verifyerror.scala @@ -0,0 +1,13 @@ +// shouldn't result in a verify error when run... +object Test extends App { + @annotation.tailrec + final def test(meh: Boolean): Boolean = { + Some("a") match { + case x => + x match { + case _ => if(meh) test(false) else false + } + } + } + println(test(true)) +} \ No newline at end of file -- cgit v1.2.3 From 09eda4ef92168685ef3d301439bb8b6df76f982e Mon Sep 17 00:00:00 2001 From: Adriaan Moors Date: Thu, 22 Mar 2012 17:36:12 +0100 Subject: do nothing when closing closed block in ignoremode this came to light with the virtual pattern matcher, which emits jumps like `matchEnd3(_test(Test.this, false))`, where _test is a tailcall the nested jumping caused double-closing (the second time in ignore mode) thus. 
when closing a closed block in ignore mode, simply do nothing from genLoad for label-jumps: note: when one of the args to genLoadLabelArguments is a jump to a label, it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true, this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored, however, as emitOnly will close the block, which reverses its instructions (when it's still open), we better not reverse when the block has already been closed but is in ignore mode (if it's not in ignore mode, double-closing is an error) @dragos figured it out, all I did was write the comment and the `if` test case to repro basic blocks crasher the tailcall in the forward jump `matchEnd3(_test(Test.this, false))` in the following program crashes the back-end (error below) @scala.annotation.tailrec final def test(meh: Boolean): Boolean = { val _$this: Test.type = Test.this; _test(_$this,meh){ case val x1: Some[String] = new Some[String]("a"); case3(){ matchEnd2({ case val x1: Some[String] = x1; case4(){ if (x1.ne(null)) matchEnd3(if (meh) _test(Test.this, false) else false) else case5() }; case5(){ matchEnd3(_test(Test.this, false)) }; matchEnd3(x){ x } }) }; matchEnd2(x){ x } } }; The last instruction (of basic block 11) is not a control flow instruction: CONSTANT(false) // methods def test(meh: Boolean (BOOL)): Boolean { locals: value meh, value _$this, value x1, value x, value x, value x1 startBlock: 1 blocks: [1,2,3,4,5,6,7,8,9,10,11,12,13] 1: 4 JUMP 2 2: 5 NEW REF(class Some) 5 DUP(REF(class Some)) 5 CONSTANT("a") 5 CALL_METHOD scala.Some. (static-instance) 5 STORE_LOCAL(value x1) 5 SCOPE_ENTER value x1 5 JUMP 3 3: 5 LOAD_LOCAL(value x1) 7 STORE_LOCAL(value x1) 7 SCOPE_ENTER value x1 7 JUMP 4 4: 7 LOAD_LOCAL(value x1) 7 CZJUMP (REF(class Object))NE ? 5 : 6 5: 8 LOAD_LOCAL(value meh) 8 CZJUMP (BOOL)NE ? 8 : 9 6: ? JUMP 11 7: 7 DROP BOOL 7 JUMP 11 8: 8 CONSTANT(false) 8 STORE_LOCAL(value meh) 8 JUMP 2 9: 8 CONSTANT(false) 8 JUMP 10 10: 8 STORE_LOCAL(value x) 8 JUMP 12 11: 9 JUMP 2 9 STORE_LOCAL(value meh) 9 CONSTANT(false) 12: 7 LOAD_LOCAL(value x) 7 SCOPE_EXIT value x1 7 STORE_LOCAL(value x) 7 JUMP 13 13: 5 LOAD_LOCAL(value x) 5 SCOPE_EXIT value x1 5 RETURN(BOOL) --- .../scala/tools/nsc/backend/icode/BasicBlocks.scala | 14 ++++++++++---- src/compiler/scala/tools/nsc/backend/icode/GenICode.scala | 7 +++++++ test/files/run/virtpatmat_tailcalls_verifyerror.scala | 3 ++- 3 files changed, 19 insertions(+), 5 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala index 68c4ac03f6..4f3b0bf951 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/BasicBlocks.scala @@ -386,10 +386,16 @@ trait BasicBlocks { def close() { assert(!closed || ignore, this) assert(instructionList.nonEmpty, "Empty block: " + this) - closed = true - setFlag(DIRTYSUCCS) - instructionList = instructionList.reverse - instrs = instructionList.toArray + if (ignore && closed) { // redundant `ignore &&` for clarity -- we should never be in state `!ignore && closed` + // not doing anything to this block is important... 
+ // because the else branch reverses innocent blocks, which is wrong when they're in ignore mode (and closed) + // reversing the instructions when (closed && ignore) wreaks havoc for nested label jumps (see comments in genLoad) + } else { + closed = true + setFlag(DIRTYSUCCS) + instructionList = instructionList.reverse + instrs = instructionList.toArray + } } def open() { diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 9e801e3ea8..41d9d93e7a 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -868,6 +868,13 @@ abstract class GenICode extends SubComponent { abort("Unknown label target: " + sym + " at: " + (fun.pos) + ": ctx: " + ctx) } }) + // note: when one of the args to genLoadLabelArguments is a jump to a label, + // it will call back into genLoad and arrive at this case, which will then set ctx1.bb.ignore to true, + // this is okay, since we're jumping unconditionally, so the loads and jumps emitted by the outer + // call to genLoad (by calling genLoadLabelArguments and emitOnly) can safely be ignored, + // however, as emitOnly will close the block, which reverses its instructions (when it's still open), + // we better not reverse when the block has already been closed but is in ignore mode + // (if it's not in ignore mode, double-closing is an error) val ctx1 = genLoadLabelArguments(args, label, ctx) ctx1.bb.emitOnly(if (label.anchored) JUMP(label.block) else PJUMP(label)) ctx1.bb.enterIgnoreMode diff --git a/test/files/run/virtpatmat_tailcalls_verifyerror.scala b/test/files/run/virtpatmat_tailcalls_verifyerror.scala index 1ee613f09e..5ce91e8dce 100644 --- a/test/files/run/virtpatmat_tailcalls_verifyerror.scala +++ b/test/files/run/virtpatmat_tailcalls_verifyerror.scala @@ -5,7 +5,8 @@ object Test extends App { Some("a") match { case x => x match { - case _ => if(meh) test(false) else false + case Some(_) => if(meh) test(false) else false + case _ => test(false) } } } -- cgit v1.2.3 From e3dec9f006ac2631281fb936c4ca206daa8fda5d Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Wed, 25 Jan 2012 13:30:40 -0200 Subject: Regex improvements This adds findAllMatchIn to Regex to mirror other similar methods. It also overloads StringLike's "r", adding a version that accepts group names. It includes test cases for both methods. Closes SI-2460. --- .../scala/collection/immutable/StringLike.scala | 15 ++++++++-- src/library/scala/util/matching/Regex.scala | 23 ++++++++++++++-- test/files/scalacheck/t2460.scala | 32 ++++++++++++++++++++++ 3 files changed, 66 insertions(+), 4 deletions(-) create mode 100644 test/files/scalacheck/t2460.scala (limited to 'test/files') diff --git a/src/library/scala/collection/immutable/StringLike.scala b/src/library/scala/collection/immutable/StringLike.scala index f9697565de..fc4e7bf0a8 100644 --- a/src/library/scala/collection/immutable/StringLike.scala +++ b/src/library/scala/collection/immutable/StringLike.scala @@ -207,9 +207,20 @@ self => /** You can follow a string with `.r`, turning it into a `Regex`. E.g. * - * """A\w*""".r is the regular expression for identifiers starting with `A`. + * `"""A\w*""".r` is the regular expression for identifiers starting with `A`. */ - def r: Regex = new Regex(toString) + def r: Regex = r() + + /** You can follow a string with `.r(g1, ... , gn)`, turning it into a `Regex`, + * with group names g1 through gn. 
+ * + * `"""(\d\d)-(\d\d)-(\d\d\d\d)""".r("month", "day", "year")` matches dates + * and provides its subcomponents through groups named "month", "day" and + * "year". + * + * @param groupNames The names of the groups in the pattern, in the order they appear. + */ + def r(groupNames: String*): Regex = new Regex(toString, groupNames: _*) def toBoolean: Boolean = parseBoolean(toString) def toByte: Byte = java.lang.Byte.parseByte(toString) diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index ca97515e23..2debd247b8 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -180,7 +180,7 @@ class Regex(regex: String, groupNames: String*) extends Serializable { None } - /** Return all matches of this regexp in given character sequence as a [[scala.util.mathcing.Regex.MatchIterator]], + /** Return all matches of this regexp in given character sequence as a [[scala.util.matching.Regex.MatchIterator]], * which is a special [[scala.collection.Iterator]] that returns the * matched strings, but can also be converted into a normal iterator * that returns objects of type [[scala.util.matching.Regex.Match]] @@ -193,6 +193,25 @@ class Regex(regex: String, groupNames: String*) extends Serializable { */ def findAllIn(source: java.lang.CharSequence) = new Regex.MatchIterator(source, this, groupNames) + + /** Return all matches of this regexp in given character sequence as a + * [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]. + * + * @param source The text to match against. + * @return A [[scala.collection.Iterator]] of [[scala.util.matching.Regex.Match]] for all matches. + * @example {{{for (words <- """\w+""".r findAllMatchIn "A simple example.") yield words.start}}} + */ + def findAllMatchIn(source: java.lang.CharSequence): Iterator[Match] = { + val matchIterator = findAllIn(source) + new Iterator[Match] { + def hasNext = matchIterator.hasNext + def next: Match = { + matchIterator.next; + new Match(matchIterator.source, matchIterator.matcher, matchIterator.groupNames).force + } + } + } + /** Return optionally first matching string of this regexp in given character sequence, * or None if it does not exist. * @@ -505,7 +524,7 @@ object Regex { class MatchIterator(val source: java.lang.CharSequence, val regex: Regex, val groupNames: Seq[String]) extends AbstractIterator[String] with Iterator[String] with MatchData { self => - protected val matcher = regex.pattern.matcher(source) + protected[Regex] val matcher = regex.pattern.matcher(source) private var nextSeen = false /** Is there another match? 
*/ diff --git a/test/files/scalacheck/t2460.scala b/test/files/scalacheck/t2460.scala new file mode 100644 index 0000000000..196b43789f --- /dev/null +++ b/test/files/scalacheck/t2460.scala @@ -0,0 +1,32 @@ +import org.scalacheck.Prop.forAll +import org.scalacheck.Properties +import org.scalacheck.ConsoleReporter.testStatsEx +import org.scalacheck.{Test => SCTest} +import org.scalacheck.Gen + +object Test extends Properties("Regex : Ticket 2460") { + + val vowel = Gen.oneOf("a", "z") + + val numberOfMatch = forAll(vowel) { + (s: String) => "\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).size == 20 + } + + val numberOfGroup = forAll(vowel) { + (s: String) => "\\s*([a-z])\\s*([a-z])\\s*".r("data").findAllMatchIn((1 to 20).map(_ => s).mkString).next.groupCount == 2 + } + + val nameOfGroup = forAll(vowel) { + (s: String) => "([a-z])".r("data").findAllMatchIn(s).next.group("data") == s + } + + val tests = List( + ("numberOfMatch", numberOfMatch), + ("numberOfGroup", numberOfGroup), + ("nameOfGroup", nameOfGroup) + ) + + /*tests foreach { + case (name, p) => testStatsEx(name, SCTest.check(p)) + }*/ +} -- cgit v1.2.3 From 479dd13148c380619d3e9156ef1913467decc05c Mon Sep 17 00:00:00 2001 From: "Daniel C. Sobral" Date: Wed, 25 Jan 2012 15:34:12 -0200 Subject: Document regex replacement strings behavior. All replacement methods use dolar signs to identify groups in the matched string, and backslashes to escape characters. Document this behavior, and provide a method that can be used to properly quote replacement strings when this behavior is not desired. Closes SI-4750. --- src/library/scala/util/matching/Regex.scala | 35 +++++++++++++++++++++++++++-- test/files/run/si4750.check | 1 + test/files/run/si4750.scala | 7 ++++++ 3 files changed, 41 insertions(+), 2 deletions(-) create mode 100644 test/files/run/si4750.check create mode 100644 test/files/run/si4750.scala (limited to 'test/files') diff --git a/src/library/scala/util/matching/Regex.scala b/src/library/scala/util/matching/Regex.scala index 2debd247b8..3f21cc9724 100644 --- a/src/library/scala/util/matching/Regex.scala +++ b/src/library/scala/util/matching/Regex.scala @@ -133,6 +133,15 @@ import java.util.regex.{ Pattern, Matcher } * * @param regex A string representing a regular expression * @param groupNames A mapping from names to indices in capture groups + * + * @define replacementString + * In the replacement String, a dollar sign (`$`) followed by a number will be + * interpreted as a reference to a group in the matched pattern, with numbers + * 1 through 9 corresponding to the first nine groups, and 0 standing for the + * whole match. Any other character is an error. The backslash (`\`) character + * will be interpreted as an escape character, and can be used to escape the + * dollar sign. One can use [[scala.util.matching.Regex]]'s `quoteReplacement` + * to automatically escape these characters. */ @SerialVersionUID(-2094783597747625537L) class Regex(regex: String, groupNames: String*) extends Serializable { @@ -276,6 +285,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable { } /** Replaces all matches by a string. + * + * $replacementString * * @param target The string to match * @param replacement The string that will replace each match @@ -299,6 +310,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * val repl = datePattern replaceAllIn (text, m => m.group("month")+"/"+m.group("day")) * }}} * + * $replacementString + * * @param target The string to match. 
* @param replacer The function which maps a match to another string. * @return The target string after replacements. @@ -317,13 +330,15 @@ class Regex(regex: String, groupNames: String*) extends Serializable { * {{{ * import scala.util.matching.Regex._ * - * val map = Map("x" -> "a var", "y" -> "another var") + * val map = Map("x" -> "a var", "y" -> """some $ and \ signs""") * val text = "A text with variables %x, %y and %z." * val varPattern = """%(\w+)""".r - * val mapper = (m: Match) => map get (m group 1) + * val mapper = (m: Match) => map get (m group 1) map (quoteReplacement(_)) * val repl = varPattern replaceSomeIn (text, mapper) * }}} * + * $replacementString + * * @param target The string to match. * @param replacer The function which optionally maps a match to another string. * @return The target string after replacements. @@ -337,6 +352,8 @@ class Regex(regex: String, groupNames: String*) extends Serializable { } /** Replaces the first match by a string. + * + * $replacementString * * @param target The string to match * @param replacement The string that will replace the match @@ -588,4 +605,18 @@ object Regex { def replace(rs: String) = matcher.appendReplacement(sb, rs) } + + /** Quotes replacement strings to be used in replacement methods. + * + * Replacement methods give special meaning to backslashes (`\`) and + * dollar signs (`$`) in replacement strings, so they are not treated + * as literals. This method escapes these characters so the resulting + * string can be used as a literal replacement representing the input + * string. + * + * @param text The string one wishes to use as literal replacement. + * @return A string that can be used to replace matches with `text`. + * @example {{{"CURRENCY".r.replaceAllIn(input, Regex quoteReplacement "US$")}}} + */ + def quoteReplacement(text: String): String = Matcher quoteReplacement text } diff --git a/test/files/run/si4750.check b/test/files/run/si4750.check new file mode 100644 index 0000000000..bf55f70df3 --- /dev/null +++ b/test/files/run/si4750.check @@ -0,0 +1 @@ +US$ 5.80 diff --git a/test/files/run/si4750.scala b/test/files/run/si4750.scala new file mode 100644 index 0000000000..96d2c4fec7 --- /dev/null +++ b/test/files/run/si4750.scala @@ -0,0 +1,7 @@ +import scala.util.matching.Regex + +object Test extends App { + val input = "CURRENCY 5.80" + println("CURRENCY".r.replaceAllIn(input, Regex quoteReplacement "US$")) +} + -- cgit v1.2.3 From 2b90e85b68fca963ae66106d1ff1c21b1428056f Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 27 Mar 2012 07:50:20 -0700 Subject: Workaround for "package is not a value". Not actually a fix, but when we see a package where a module is expected, it's not a great stretch to try the package object. References SI-5604. 
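The diff below substitutes a package's package object when the backend meets a package symbol where a module is expected. A minimal sketch of why that substitution is plausible, using hypothetical names rather than the ReplConfig/ReplReporter sources added as the test case: members selected through a package prefix are compiled into the package object's module, so loading that module yields the value the selection needs.

// Sketch only; the package and member names are invented for illustration.
package object demo {
  def greet(): String = "hi"               // lives on the module demo.`package`
}

package demo {
  object Use {
    // Selecting greet through the package prefix resolves to the package
    // object's member; loading the package object's module class instead of
    // the bare package symbol is the fallback the patch adds.
    def run(): String = demo.greet()
  }
}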
--- .../scala/tools/nsc/backend/icode/GenICode.scala | 53 ++++++++++++---------- test/files/pos/t5604/ReplConfig.scala | 53 ++++++++++++++++++++++ test/files/pos/t5604/ReplReporter.scala | 30 ++++++++++++ 3 files changed, 112 insertions(+), 24 deletions(-) create mode 100644 test/files/pos/t5604/ReplConfig.scala create mode 100644 test/files/pos/t5604/ReplReporter.scala (limited to 'test/files') diff --git a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala index 41d9d93e7a..8e568eca79 100644 --- a/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala +++ b/src/compiler/scala/tools/nsc/backend/icode/GenICode.scala @@ -944,11 +944,10 @@ abstract class GenICode extends SubComponent { "Trying to access the this of another class: " + "tree.symbol = " + tree.symbol + ", ctx.clazz.symbol = " + ctx.clazz.symbol + " compilation unit:"+unit) if (tree.symbol.isModuleClass && tree.symbol != ctx.clazz.symbol) { - debuglog("LOAD_MODULE from 'This': " + tree.symbol); - assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree) - genLoadModule(ctx, tree.symbol, tree.pos) + genLoadModule(ctx, tree) generatedType = REFERENCE(tree.symbol) - } else { + } + else { ctx.bb.emit(THIS(ctx.clazz.symbol), tree.pos) generatedType = REFERENCE( if (tree.symbol == ArrayClass) ObjectClass else ctx.clazz.symbol @@ -961,11 +960,7 @@ abstract class GenICode extends SubComponent { "Selection of non-module from empty package: " + tree + " sym: " + tree.symbol + " at: " + (tree.pos) ) - debuglog("LOAD_MODULE from Select(): " + tree.symbol) - - assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree) - genLoadModule(ctx, tree.symbol, tree.pos) - ctx + genLoadModule(ctx, tree) case Select(qualifier, selector) => val sym = tree.symbol @@ -973,14 +968,13 @@ abstract class GenICode extends SubComponent { val hostClass = qualifier.tpe.typeSymbol.orElse(sym.owner) if (sym.isModule) { - debuglog("LOAD_MODULE from Select(qualifier, selector): " + sym) - assert(!tree.symbol.isPackageClass, "Cannot use package as value: " + tree) - genLoadModule(ctx, sym, tree.pos) - ctx - } else if (sym.isStaticMember) { + genLoadModule(ctx, tree) + } + else if (sym.isStaticMember) { ctx.bb.emit(LOAD_FIELD(sym, true) setHostClass hostClass, tree.pos) ctx - } else { + } + else { val ctx1 = genLoadQualifier(tree, ctx) ctx1.bb.emit(LOAD_FIELD(sym, false) setHostClass hostClass, tree.pos) ctx1 @@ -990,11 +984,10 @@ abstract class GenICode extends SubComponent { val sym = tree.symbol if (!sym.isPackage) { if (sym.isModule) { - debuglog("LOAD_MODULE from Ident(name): " + sym) - assert(!sym.isPackageClass, "Cannot use package as value: " + tree) - genLoadModule(ctx, sym, tree.pos) + genLoadModule(ctx, tree) generatedType = toTypeKind(sym.info) - } else { + } + else { try { val Some(l) = ctx.method.lookupLocal(sym) ctx.bb.emit(LOAD_LOCAL(l), tree.pos) @@ -1207,8 +1200,19 @@ abstract class GenICode extends SubComponent { genLoad(arg, res, toTypeKind(tpe)) } - private def genLoadModule(ctx: Context, sym: Symbol, pos: Position) { - ctx.bb.emit(LOAD_MODULE(sym), pos) + private def genLoadModule(ctx: Context, tree: Tree): Context = { + // Working around SI-5604. Rather than failing the compile when we see + // a package here, check if there's a package object. 
+ val sym = ( + if (!tree.symbol.isPackageClass) tree.symbol + else tree.symbol.info.member(nme.PACKAGE) match { + case NoSymbol => assert(false, "Cannot use package as value: " + tree) ; NoSymbol + case s => Console.err.println("Bug: found package class where package object expected. Converting.") ; s.moduleClass + } + ) + debuglog("LOAD_MODULE from %s: %s".format(tree.shortClass, sym)) + ctx.bb.emit(LOAD_MODULE(sym), tree.pos) + ctx } def genConversion(from: TypeKind, to: TypeKind, ctx: Context, cast: Boolean) = { @@ -1567,9 +1571,10 @@ abstract class GenICode extends SubComponent { val ctx1 = genLoad(l, ctx, ObjectReference) val ctx2 = genLoad(r, ctx1, ObjectReference) - ctx2.bb.emit(CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false))) - ctx2.bb.emit(CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL)) - ctx2.bb.close + ctx2.bb.emitOnly( + CALL_METHOD(equalsMethod, if (settings.optimise.value) Dynamic else Static(false)), + CZJUMP(thenCtx.bb, elseCtx.bb, NE, BOOL) + ) } else { if (isNull(l)) diff --git a/test/files/pos/t5604/ReplConfig.scala b/test/files/pos/t5604/ReplConfig.scala new file mode 100644 index 0000000000..8c589eba60 --- /dev/null +++ b/test/files/pos/t5604/ReplConfig.scala @@ -0,0 +1,53 @@ +/* NSC -- new Scala compiler + * Copyright 2005-2011 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package interpreter + +import util.Exceptional.unwrap +import util.stackTraceString + +trait ReplConfig { + lazy val replProps = new ReplProps + + class TapMaker[T](x: T) { + def tapInfo(msg: => String): T = tap(x => replinfo(parens(x))) + def tapDebug(msg: => String): T = tap(x => repldbg(parens(x))) + def tapTrace(msg: => String): T = tap(x => repltrace(parens(x))) + def tap[U](f: T => U): T = { + f(x) + x + } + } + + private def parens(x: Any) = "(" + x + ")" + private def echo(msg: => String) = + try Console println msg + catch { case x: AssertionError => Console.println("Assertion error printing debugging output: " + x) } + + private[nsc] def repldbg(msg: => String) = if (isReplDebug) echo(msg) + private[nsc] def repltrace(msg: => String) = if (isReplTrace) echo(msg) + private[nsc] def replinfo(msg: => String) = if (isReplInfo) echo(msg) + + private[nsc] def logAndDiscard[T](label: String, alt: => T): PartialFunction[Throwable, T] = { + case t => + repldbg(label + ": " + unwrap(t)) + repltrace(stackTraceString(unwrap(t))) + alt + } + private[nsc] def substituteAndLog[T](alt: => T)(body: => T): T = + substituteAndLog("" + alt, alt)(body) + private[nsc] def substituteAndLog[T](label: String, alt: => T)(body: => T): T = { + try body + catch logAndDiscard(label, alt) + } + private[nsc] def squashAndLog(label: String)(body: => Unit): Unit = + substituteAndLog(label, ())(body) + + def isReplTrace: Boolean = replProps.trace + def isReplDebug: Boolean = replProps.debug || isReplTrace + def isReplInfo: Boolean = replProps.info || isReplDebug + def isReplPower: Boolean = replProps.power +} diff --git a/test/files/pos/t5604/ReplReporter.scala b/test/files/pos/t5604/ReplReporter.scala new file mode 100644 index 0000000000..130af990ad --- /dev/null +++ b/test/files/pos/t5604/ReplReporter.scala @@ -0,0 +1,30 @@ +/* NSC -- new Scala compiler + * Copyright 2002-2011 LAMP/EPFL + * @author Paul Phillips + */ + +package scala.tools.nsc +package interpreter + +import reporters._ +import IMain._ + +class ReplReporter(intp: IMain) extends ConsoleReporter(intp.settings, Console.in, new ReplStrippingWriter(intp)) { + override def printMessage(msg: String) { + // 
Avoiding deadlock if the compiler starts logging before + // the lazy val is complete. + if (intp.isInitializeComplete) { + if (intp.totalSilence) { + if (isReplTrace) + super.printMessage("[silent] " + msg) + } + else super.printMessage(msg) + } + else Console.println("[init] " + msg) + } + + override def displayPrompt() { + if (intp.totalSilence) () + else super.displayPrompt() + } +} -- cgit v1.2.3 From 47318105010786bc6eba835c957ce3cd4fe88d70 Mon Sep 17 00:00:00 2001 From: Aleksandar Prokopec Date: Wed, 28 Mar 2012 19:17:49 +0200 Subject: Work on source compatibility between akka and scala futures. Removed some methods from execution contexts. Changed Awaitable interface. --- src/library/scala/concurrent/Awaitable.scala | 9 ++- .../scala/concurrent/ConcurrentPackageObject.scala | 14 ++-- .../scala/concurrent/ExecutionContext.scala | 91 +--------------------- src/library/scala/concurrent/Future.scala | 31 +++----- src/library/scala/concurrent/Promise.scala | 16 ++-- .../concurrent/impl/ExecutionContextImpl.scala | 84 +------------------- src/library/scala/concurrent/impl/Future.scala | 74 +++++++++++++++++- src/library/scala/concurrent/impl/Promise.scala | 29 +++---- src/library/scala/concurrent/package.scala | 28 +++---- test/files/jvm/scala-concurrent-tck.scala | 2 +- 10 files changed, 136 insertions(+), 242 deletions(-) (limited to 'test/files') diff --git a/src/library/scala/concurrent/Awaitable.scala b/src/library/scala/concurrent/Awaitable.scala index 6c9995eb05..052e6e2366 100644 --- a/src/library/scala/concurrent/Awaitable.scala +++ b/src/library/scala/concurrent/Awaitable.scala @@ -16,8 +16,13 @@ import scala.concurrent.util.Duration trait Awaitable[+T] { - @implicitNotFound(msg = "Waiting must be done by calling `blocking(timeout) b`, where `b` is the `Awaitable` object or a potentially blocking piece of code.") - def await(atMost: Duration)(implicit canawait: CanAwait): T + def ready(atMost: Duration)(implicit permit: CanAwait): this.type + + /** + * Throws exceptions if cannot produce a T within the specified time + * This method should not be called directly. + */ + def result(atMost: Duration)(implicit permit: CanAwait): T } diff --git a/src/library/scala/concurrent/ConcurrentPackageObject.scala b/src/library/scala/concurrent/ConcurrentPackageObject.scala index 3471095051..ba98757906 100644 --- a/src/library/scala/concurrent/ConcurrentPackageObject.scala +++ b/src/library/scala/concurrent/ConcurrentPackageObject.scala @@ -59,14 +59,18 @@ abstract class ConcurrentPackageObject { /* concurrency constructs */ def future[T](body: =>T)(implicit execCtx: ExecutionContext = executionContext): Future[T] = - execCtx future body + Future[T](body) def promise[T]()(implicit execCtx: ExecutionContext = executionContext): Promise[T] = - execCtx promise + Promise[T]() /** Wraps a block of code into an awaitable object. */ def body2awaitable[T](body: =>T) = new Awaitable[T] { - def await(atMost: Duration)(implicit cb: CanAwait) = body + def ready(atMost: Duration)(implicit permit: CanAwait) = { + body + this + } + def result(atMost: Duration)(implicit permit: CanAwait) = body } /** Used to block on a piece of code which potentially blocks. 
@@ -78,8 +82,8 @@ abstract class ConcurrentPackageObject { * - InterruptedException - in the case that a wait within the blockable object was interrupted * - TimeoutException - in the case that the blockable object timed out */ - def blocking[T](atMost: Duration)(body: =>T)(implicit execCtx: ExecutionContext): T = - executionContext.blocking(atMost)(body) + def blocking[T](body: =>T)(implicit execCtx: ExecutionContext): T = + executionContext.blocking(body) /** Blocks on an awaitable object. * diff --git a/src/library/scala/concurrent/ExecutionContext.scala b/src/library/scala/concurrent/ExecutionContext.scala index c4a45f9fb5..a206a2d4ea 100644 --- a/src/library/scala/concurrent/ExecutionContext.scala +++ b/src/library/scala/concurrent/ExecutionContext.scala @@ -22,19 +22,11 @@ import collection._ trait ExecutionContext { - protected implicit object CanAwaitEvidence extends CanAwait - def execute(runnable: Runnable): Unit def execute[U](body: () => U): Unit - def promise[T]: Promise[T] - - def future[T](body: Callable[T]): Future[T] = future(body.call()) - - def future[T](body: => T): Future[T] - - def blocking[T](atMost: Duration)(body: =>T): T + def blocking[T](body: =>T): T def blocking[T](awaitable: Awaitable[T], atMost: Duration): T @@ -44,89 +36,8 @@ trait ExecutionContext { private implicit val executionContext = this - def keptPromise[T](result: T): Promise[T] = { - val p = promise[T] - p success result - } - - def brokenPromise[T](t: Throwable): Promise[T] = { - val p = promise[T] - p failure t - } - - /** TODO some docs - * - */ - def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]]): Future[Coll[T]] = { - import nondeterministic._ - val buffer = new mutable.ArrayBuffer[T] - val counter = new AtomicInteger(1) // how else could we do this? 
- val p: Promise[Coll[T]] = promise[Coll[T]] // we need an implicit execctx in the signature - var idx = 0 - - def tryFinish() = if (counter.decrementAndGet() == 0) { - val builder = cbf(futures) - builder ++= buffer - p success builder.result - } - - for (f <- futures) { - val currentIndex = idx - buffer += null.asInstanceOf[T] - counter.incrementAndGet() - f onComplete { - case Failure(t) => - p tryFailure t - case Success(v) => - buffer(currentIndex) = v - tryFinish() - } - idx += 1 - } - - tryFinish() - - p.future - } - - /** TODO some docs - * - */ - def any[T](futures: Traversable[Future[T]]): Future[T] = { - val p = promise[T] - val completeFirst: Try[T] => Unit = elem => p tryComplete elem - - futures foreach (_ onComplete completeFirst) - - p.future - } - - /** TODO some docs - * - */ - def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean): Future[Option[T]] = { - if (futures.isEmpty) Promise.kept[Option[T]](None).future - else { - val result = promise[Option[T]] - val count = new AtomicInteger(futures.size) - val search: Try[T] => Unit = { - v => v match { - case Success(r) => if (predicate(r)) result trySuccess Some(r) - case _ => - } - if (count.decrementAndGet() == 0) result trySuccess None - } - - futures.foreach(_ onComplete search) - - result.future - } - } - } -sealed trait CanAwait - diff --git a/src/library/scala/concurrent/Future.scala b/src/library/scala/concurrent/Future.scala index 1dc8e38355..fa4c61c227 100644 --- a/src/library/scala/concurrent/Future.scala +++ b/src/library/scala/concurrent/Future.scala @@ -235,8 +235,8 @@ self => * val f = future { 5 } * val g = f filter { _ % 2 == 1 } * val h = f filter { _ % 2 == 0 } - * await(0) g // evaluates to 5 - * await(0) h // throw a NoSuchElementException + * await(g, 0) // evaluates to 5 + * await(h, 0) // throw a NoSuchElementException * }}} */ def filter(pred: T => Boolean): Future[T] = { @@ -272,8 +272,8 @@ self => * val h = f collect { * case x if x > 0 => x * 2 * } - * await(0) g // evaluates to 5 - * await(0) h // throw a NoSuchElementException + * await(g, 0) // evaluates to 5 + * await(h, 0) // throw a NoSuchElementException * }}} */ def collect[S](pf: PartialFunction[T, S]): Future[S] = { @@ -383,7 +383,7 @@ self => * val f = future { sys.error("failed") } * val g = future { 5 } * val h = f orElse g - * await(0) h // evaluates to 5 + * await(h, 0) // evaluates to 5 * }}} */ def fallbackTo[U >: T](that: Future[U]): Future[U] = { @@ -445,7 +445,7 @@ self => * val f = future { sys.error("failed") } * val g = future { 5 } * val h = f either g - * await(0) h // evaluates to either 5 or throws a runtime exception + * await(h, 0) // evaluates to either 5 or throws a runtime exception * }}} */ def either[U >: T](that: Future[U]): Future[U] = { @@ -466,26 +466,15 @@ self => -/** TODO some docs +/** Future companion object. * * @define nonDeterministic * Note: using this method yields nondeterministic dataflow programs. 
*/ object Future { - - // TODO make more modular by encoding all other helper methods within the execution context - /** TODO some docs - */ - def all[T, Coll[X] <: Traversable[X]](futures: Coll[Future[T]])(implicit cbf: CanBuildFrom[Coll[_], T, Coll[T]], ec: ExecutionContext): Future[Coll[T]] = - ec.all[T, Coll](futures) - - // move this to future companion object - @inline def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = executor.future(body) - - def any[T](futures: Traversable[Future[T]])(implicit ec: ExecutionContext): Future[T] = ec.any(futures) - - def find[T](futures: Traversable[Future[T]])(predicate: T => Boolean)(implicit ec: ExecutionContext): Future[Option[T]] = ec.find(futures)(predicate) - + + def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = impl.Future(body) + } diff --git a/src/library/scala/concurrent/Promise.scala b/src/library/scala/concurrent/Promise.scala index 4404e90971..61e21606e6 100644 --- a/src/library/scala/concurrent/Promise.scala +++ b/src/library/scala/concurrent/Promise.scala @@ -30,8 +30,6 @@ import scala.util.{ Try, Success, Failure } */ trait Promise[T] { - import nondeterministic._ - /** Future containing the value of this promise. */ def future: Future[T] @@ -114,12 +112,18 @@ trait Promise[T] { object Promise { - def kept[T](result: T)(implicit execctx: ExecutionContext): Promise[T] = - execctx keptPromise result + /** Creates a new promise. + */ + def apply[T]()(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.DefaultPromise[T]() - def broken[T](t: Throwable)(implicit execctx: ExecutionContext): Promise[T] = - execctx brokenPromise t + /** Creates an already completed Promise with the specified exception + */ + def failed[T](exception: Throwable)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Failure(exception)) + /** Creates an already completed Promise with the specified result + */ + def successful[T](result: T)(implicit executor: ExecutionContext): Promise[T] = new impl.Promise.KeptPromise[T](Success(result)) + } diff --git a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala index 8ac745fd25..5dc440f42b 100644 --- a/src/library/scala/concurrent/impl/ExecutionContextImpl.scala +++ b/src/library/scala/concurrent/impl/ExecutionContextImpl.scala @@ -15,7 +15,6 @@ import scala.concurrent.forkjoin._ import scala.concurrent.{ExecutionContext, resolver, Awaitable, body2awaitable} import scala.util.{ Try, Success, Failure } import scala.concurrent.util.{ Duration } -import scala.collection.mutable.Stack @@ -38,32 +37,12 @@ private[scala] class ExecutionContextImpl(val executorService: AnyRef) extends E def run() = body() }) - def promise[T]: Promise[T] = new Promise.DefaultPromise[T]()(this) - - def future[T](body: =>T): Future[T] = { - val p = promise[T] - - dispatchFuture { - () => - p complete { - try { - Success(body) - } catch { - case e => resolver(e) - } - } - } - - p.future - } - - def blocking[T](atMost: Duration)(body: =>T): T = blocking(body2awaitable(body), atMost) + def blocking[T](body: =>T): T = blocking(body2awaitable(body), Duration.fromNanos(0)) def blocking[T](awaitable: Awaitable[T], atMost: Duration): T = { - currentExecutionContext.get match { - case null => awaitable.await(atMost)(null) // outside - TODO - fix timeout case - case x => x.blockingCall(awaitable) // inside an execution context thread - } + Future.releaseStack(this) + + 
awaitable.result(atMost)(scala.concurrent.Await.canAwaitEvidence) } def reportFailure(t: Throwable) = t match { @@ -71,61 +50,6 @@ private[scala] class ExecutionContextImpl(val executorService: AnyRef) extends E case t => t.printStackTrace() } - /** Only callable from the tasks running on the same execution context. */ - private def blockingCall[T](body: Awaitable[T]): T = { - releaseStack() - - // TODO see what to do with timeout - body.await(Duration.fromNanos(0))(CanAwaitEvidence) - } - - // an optimization for batching futures - // TODO we should replace this with a public queue, - // so that it can be stolen from - // OR: a push to the local task queue should be so cheap that this is - // not even needed, but stealing is still possible - private val _taskStack = new ThreadLocal[Stack[() => Unit]]() - - private def releaseStack(): Unit = - _taskStack.get match { - case stack if (stack ne null) && stack.nonEmpty => - val tasks = stack.elems - stack.clear() - _taskStack.remove() - dispatchFuture(() => _taskStack.get.elems = tasks, true) - case null => - // do nothing - there is no local batching stack anymore - case _ => - _taskStack.remove() - } - - private[impl] def dispatchFuture(task: () => Unit, force: Boolean = false): Unit = - _taskStack.get match { - case stack if (stack ne null) && !force => stack push task - case _ => this.execute( - new Runnable { - def run() { - try { - val taskStack = Stack[() => Unit](task) - _taskStack set taskStack - while (taskStack.nonEmpty) { - val next = taskStack.pop() - try { - next.apply() - } catch { - case e => - // TODO catching all and continue isn't good for OOME - reportFailure(e) - } - } - } finally { - _taskStack.remove() - } - } - } - ) - } - } diff --git a/src/library/scala/concurrent/impl/Future.scala b/src/library/scala/concurrent/impl/Future.scala index b4385ea34a..6833b2467f 100644 --- a/src/library/scala/concurrent/impl/Future.scala +++ b/src/library/scala/concurrent/impl/Future.scala @@ -8,13 +8,17 @@ package scala.concurrent.impl + + import scala.concurrent.{Awaitable, ExecutionContext} import scala.util.{ Try, Success, Failure } -//import scala.util.continuations._ +import scala.collection.mutable.Stack + + private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awaitable[T] { - implicit def executor: ExecutionContextImpl + implicit def executor: ExecutionContext /** For use only within a Future.flow block or another compatible Delimited Continuations reset block. * @@ -40,7 +44,7 @@ private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awa * that conforms to A's erased type or a ClassCastException otherwise. 
*/ final def mapTo[T](implicit m: Manifest[T]) = { - val p = executor.promise[T] + val p = new Promise.DefaultPromise[T] onComplete { case f @ Failure(t) => p complete f.asInstanceOf[Try[T]] @@ -48,7 +52,7 @@ private[concurrent] trait Future[+T] extends scala.concurrent.Future[T] with Awa p complete (try { Success(Future.boxedType(m.erasure).cast(v).asInstanceOf[T]) } catch { - case e: ClassCastException ⇒ Failure(e) + case e: ClassCastException => Failure(e) }) } @@ -86,4 +90,66 @@ object Future { def boxedType(c: Class[_]): Class[_] = { if (c.isPrimitive) toBoxed(c) else c } + + def apply[T](body: =>T)(implicit executor: ExecutionContext): Future[T] = { + val promise = new Promise.DefaultPromise[T]() + executor.execute(new Runnable { + def run = { + promise complete { + try { + Success(body) + } catch { + case e => scala.concurrent.resolver(e) + } + } + } + }) + promise.future + } + + // an optimization for batching futures + // TODO we should replace this with a public queue, + // so that it can be stolen from + // OR: a push to the local task queue should be so cheap that this is + // not even needed, but stealing is still possible + private val _taskStack = new ThreadLocal[Stack[() => Unit]]() + + private[impl] def releaseStack(executor: ExecutionContext): Unit = + _taskStack.get match { + case stack if (stack ne null) && stack.nonEmpty => + val tasks = stack.elems + stack.clear() + _taskStack.remove() + dispatchFuture(executor, () => _taskStack.get.elems = tasks, true) + case null => + // do nothing - there is no local batching stack anymore + case _ => + _taskStack.remove() + } + + private[impl] def dispatchFuture(executor: ExecutionContext, task: () => Unit, force: Boolean = false): Unit = + _taskStack.get match { + case stack if (stack ne null) && !force => stack push task + case _ => executor.execute(new Runnable { + def run() { + try { + val taskStack = Stack[() => Unit](task) + _taskStack set taskStack + while (taskStack.nonEmpty) { + val next = taskStack.pop() + try { + next.apply() + } catch { + case e => + // TODO catching all and continue isn't good for OOME + executor.reportFailure(e) + } + } + } finally { + _taskStack.remove() + } + } + }) + } + } diff --git a/src/library/scala/concurrent/impl/Promise.scala b/src/library/scala/concurrent/impl/Promise.scala index 4a983b5001..c79b0d02cc 100644 --- a/src/library/scala/concurrent/impl/Promise.scala +++ b/src/library/scala/concurrent/impl/Promise.scala @@ -26,7 +26,7 @@ private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with Fu def future = this - def newPromise[S]: Promise[S] = executor promise + def newPromise[S]: scala.concurrent.Promise[S] = new Promise.DefaultPromise() // TODO refine answer and return types here from Any to type parameters // then move this up in the hierarchy @@ -75,6 +75,7 @@ private[concurrent] trait Promise[T] extends scala.concurrent.Promise[T] with Fu object Promise { + def dur2long(dur: Duration): Long = if (dur.isFinite) dur.toNanos else Long.MaxValue def EmptyPending[T](): FState[T] = emptyPendingValue.asInstanceOf[FState[T]] @@ -101,7 +102,7 @@ object Promise { /** Default promise implementation. 
*/ - class DefaultPromise[T](implicit val executor: ExecutionContextImpl) extends AbstractPromise with Promise[T] { + class DefaultPromise[T](implicit val executor: ExecutionContext) extends AbstractPromise with Promise[T] { self => updater.set(this, Promise.EmptyPending()) @@ -126,14 +127,14 @@ object Promise { value.isDefined } - executor.blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), Duration.fromNanos(0)) + executor.blocking(concurrent.body2awaitable(awaitUnsafe(dur2long(atMost))), atMost) } - private def ready(atMost: Duration)(implicit permit: CanAwait): this.type = + def ready(atMost: Duration)(implicit permit: CanAwait): this.type = if (value.isDefined || tryAwait(atMost)) this else throw new TimeoutException("Futures timed out after [" + atMost.toMillis + "] milliseconds") - def await(atMost: Duration)(implicit permit: CanAwait): T = + def result(atMost: Duration)(implicit permit: CanAwait): T = ready(atMost).value.get match { case util.Failure(e) => throw e case util.Success(r) => r @@ -176,9 +177,9 @@ object Promise { case null => false case cs if cs.isEmpty => true case cs => - executor dispatchFuture { + Future.dispatchFuture(executor, { () => cs.foreach(f => notifyCompleted(f, value)) - } + }) true } } @@ -197,9 +198,9 @@ object Promise { if (tryAddCallback()) { val result = value.get - executor dispatchFuture { + Future.dispatchFuture(executor, { () => notifyCompleted(func, result) - } + }) } this @@ -218,22 +219,22 @@ object Promise { * * Useful in Future-composition when a value to contribute is already available. */ - final class KeptPromise[T](suppliedValue: Try[T])(implicit val executor: ExecutionContextImpl) extends Promise[T] { + final class KeptPromise[T](suppliedValue: Try[T])(implicit val executor: ExecutionContext) extends Promise[T] { val value = Some(resolve(suppliedValue)) def tryComplete(value: Try[T]): Boolean = false def onComplete[U](func: Try[T] => U): this.type = { val completedAs = value.get - executor dispatchFuture { + Future.dispatchFuture(executor, { () => func(completedAs) - } + }) this } - private def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this + def ready(atMost: Duration)(implicit permit: CanAwait): this.type = this - def await(atMost: Duration)(implicit permit: CanAwait): T = value.get match { + def result(atMost: Duration)(implicit permit: CanAwait): T = value.get match { case util.Failure(e) => throw e case util.Success(r) => r } diff --git a/src/library/scala/concurrent/package.scala b/src/library/scala/concurrent/package.scala index 204b3f2673..e2ae17498f 100644 --- a/src/library/scala/concurrent/package.scala +++ b/src/library/scala/concurrent/package.scala @@ -20,27 +20,17 @@ package object concurrent extends scala.concurrent.ConcurrentPackageObject { } package concurrent { - object await { - def ready[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): Awaitable[T] = { - try blocking(awaitable, atMost) - catch { case _ => } - awaitable - } - - def result[T](atMost: Duration)(awaitable: Awaitable[T])(implicit execCtx: ExecutionContext = executionContext): T = { - blocking(awaitable, atMost) - } + + sealed trait CanAwait + + object Await { + private[concurrent] implicit val canAwaitEvidence = new CanAwait {} + + def ready[T](awaitable: Awaitable[T], atMost: Duration): Awaitable[T] = awaitable.ready(atMost) + + def result[T](awaitable: Awaitable[T], atMost: Duration): T = awaitable.result(atMost) } - /** Importing this object allows using some 
concurrency primitives - * on futures and promises that can yield nondeterministic programs. - * - * While program determinism is broken when using these primitives, - * some programs cannot be written without them (e.g. multiple client threads - * cannot send requests to a server thread through regular promises and futures). - */ - object nondeterministic { } - /** A timeout exception. * * Futures are failed with a timeout exception when their timeout expires. diff --git a/test/files/jvm/scala-concurrent-tck.scala b/test/files/jvm/scala-concurrent-tck.scala index 70221c0de1..75e2b92ff6 100644 --- a/test/files/jvm/scala-concurrent-tck.scala +++ b/test/files/jvm/scala-concurrent-tck.scala @@ -74,7 +74,7 @@ trait FutureCallbacks extends TestBase { done() throw new Exception } - f onSuccess { + f onSuccess { case _ => assert(false) } } -- cgit v1.2.3 From 97f03245d9646b9ade43418dee7dc0d2a6203ce7 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Wed, 28 Mar 2012 12:57:34 -0700 Subject: Revived the lisp test. All hail the lisp test! Closes SI-4579. --- test/disabled/run/lisp.check | 26 --- test/disabled/run/lisp.scala | 518 ------------------------------------------- test/files/run/lisp.check | 25 +++ test/files/run/lisp.scala | 518 +++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 543 insertions(+), 544 deletions(-) delete mode 100644 test/disabled/run/lisp.check delete mode 100644 test/disabled/run/lisp.scala create mode 100644 test/files/run/lisp.check create mode 100644 test/files/run/lisp.scala (limited to 'test/files') diff --git a/test/disabled/run/lisp.check b/test/disabled/run/lisp.check deleted file mode 100644 index 64053f26d0..0000000000 --- a/test/disabled/run/lisp.check +++ /dev/null @@ -1,26 +0,0 @@ -(lambda (x) (+ (* x x) 1)) -(lambda (x) (+ (* x x) 1)) - -( '(1 2 3)) = (1 2 3) -(car '(1 2 3)) = 1 -(cdr '(1 2 3)) = (2 3) -(null? '(2 3)) = 0 -(null? '()) = 1 - -faculty(10) = 3628800 -faculty(10) = 3628800 -foobar = ("a" "bc" "def" "z") - -List('lambda, List('x), List('+, List('*, 'x, 'x), 1)) -(lambda (x) (+ (* x x) 1)) - -( '(1 2 3)) = (1 2 3) -(car '(1 2 3)) = 1 -(cdr '(1 2 3)) = (2 3) -(null? '(2 3)) = 0 -(null? '()) = 1 - -faculty(10) = 3628800 -faculty(10) = 3628800 -foobar = ("a" "bc" "def" "z") - diff --git a/test/disabled/run/lisp.scala b/test/disabled/run/lisp.scala deleted file mode 100644 index 06e68f508a..0000000000 --- a/test/disabled/run/lisp.scala +++ /dev/null @@ -1,518 +0,0 @@ -//############################################################################ -// Lisp interpreter -//############################################################################ - -//############################################################################ -// Lisp Scanner - -class LispTokenizer(s: String) extends Iterator[String] { - private var i = 0; - private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')' - def hasNext: Boolean = { - while (i < s.length() && s.charAt(i) <= ' ') i += 1 - i < s.length() - } - def next: String = - if (hasNext) { - val start = i - if (isDelimiter(s charAt i)) i += 1 - else - do i = i + 1 - while (!isDelimiter(s charAt i)) - s.substring(start, i) - } else sys.error("premature end of string") -} - -//############################################################################ -// Lisp Interface - -trait Lisp { - type Data - - def string2lisp(s: String): Data - def lisp2string(s: Data): String - - def evaluate(d: Data): Data - // !!! 
def evaluate(s: String): Data = evaluate(string2lisp(s)) - def evaluate(s: String): Data -} - -//############################################################################ -// Lisp Implementation Using Case Classes - -object LispCaseClasses extends Lisp { - - import List.range - - trait Data { - def elemsToString(): String = toString(); - } - case class CONS(car: Data, cdr: Data) extends Data { - override def toString() = "(" + elemsToString() + ")"; - override def elemsToString() = car.toString() + (cdr match { - case NIL() => "" - case _ => " " + cdr.elemsToString(); - }) - } - case class NIL() extends Data { // !!! use case object - override def toString() = "()"; - } - case class SYM(name: String) extends Data { - override def toString() = name; - } - case class NUM(x: Int) extends Data { - override def toString() = x.toString(); - } - case class STR(x: String) extends Data { - override def toString() = "\"" + x + "\""; - } - case class FUN(f: List[Data] => Data) extends Data { - override def toString() = ""; - } - - def list(): Data = - NIL(); - def list(x0: Data): Data = - CONS(x0, NIL()); - def list(x0: Data, x1: Data): Data = - CONS(x0, list(x1)); - def list(x0: Data, x1: Data, x2: Data): Data = - CONS(x0, list(x1, x2)); - def list(x0: Data, x1: Data, x2: Data, x3: Data): Data = - CONS(x0, list(x1, x2, x3)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data = - CONS(x0, list(x1, x2, x3, x4)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data, x8: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data, x8: Data, x9: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9)); - - var curexp: Data = null - var trace: Boolean = false - var indent: Int = 0 - - def lispError[a](msg: String): a = - sys.error("error: " + msg + "\n" + curexp); - - trait Environment { - def lookup(n: String): Data; - def extendRec(name: String, expr: Environment => Data) = - new Environment { - def lookup(n: String): Data = - if (n == name) expr(this) else Environment.this.lookup(n); - } - def extend(name: String, v: Data) = extendRec(name, (env1 => v)); - } - val EmptyEnvironment = new Environment { - def lookup(n: String): Data = lispError("undefined: " + n); - } - - def toList(x: Data): List[Data] = x match { - case NIL() => List() - case CONS(y, ys) => y :: toList(ys) - case _ => lispError("malformed list: " + x); - } - - def toBoolean(x: Data) = x match { - case NUM(0) => false - case _ => true - } - - def normalize(x: Data): Data = x match { - case CONS(SYM("def"), - CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) => - normalize(list(SYM("def"), - SYM(name), list(SYM("lambda"), args, body), expr)) - case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) => - normalize(expr) - case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) => - normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest))) - case CONS(h, t) => CONS(normalize(h), normalize(t)) - case _ => x - } - - def eval(x: Data, env: 
Environment): Data = { - val prevexp = curexp; - curexp = x; - if (trace) { - for (x <- range(1, indent)) Console.print(" "); - Console.println("===> " + x); - indent = indent + 1; - } - val result = eval1(x, env); - if (trace) { - indent = indent - 1; - for (x <- range(1, indent)) Console.print(" "); - Console.println("<=== " + result); - } - curexp = prevexp; - result - } - - def eval1(x: Data, env: Environment): Data = x match { - case SYM(name) => - env lookup name - case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => - eval(z, env.extendRec(name, (env1 => eval(y, env1)))) - case CONS(SYM("val"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => - eval(z, env.extend(name, eval(y, env))) - case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) => - mkLambda(params, y, env) - case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) => - if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env) - case CONS(SYM("quote"), CONS(x, NIL())) => - x - case CONS(y, xs) => - apply(eval(y, env), toList(xs) map (x => eval(x, env))) - case NUM(_) => x - case STR(_) => x - case FUN(_) => x - case _ => - lispError("illegal term") - } - - def apply(fn: Data, args: List[Data]): Data = fn match { - case FUN(f) => f(args); - case _ => lispError("application of non-function: " + fn); - } - - def mkLambda(params: Data, expr: Data, env: Environment): Data = { - - def extendEnv(env: Environment, - ps: List[String], args: List[Data]): Environment = - Pair(ps, args) match { - case Pair(List(), List()) => - env - case Pair(p :: ps1, arg :: args1) => - extendEnv(env.extend(p, arg), ps1, args1) - case _ => - lispError("wrong number of arguments") - } - - val ps: List[String] = toList(params) map { - case SYM(name) => name - case _ => sys.error("illegal parameter list"); - } - - FUN(args => eval(expr, extendEnv(env, ps, args))) - } - - val globalEnv = EmptyEnvironment - .extend("=", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0) - case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)})) - .extend("+", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2) - case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)})) - .extend("-", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)})) - .extend("*", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)})) - .extend("/", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)})) - .extend("car", FUN({ - case List(CONS(x, xs)) => x})) - .extend("cdr", FUN({ - case List(CONS(x, xs)) => xs})) - .extend("null?", FUN({ - case List(NIL()) => NUM(1) - case _ => NUM(0)})) - .extend("cons", FUN({ - case List(x, y) => CONS(x, y)})); - - def evaluate(x: Data): Data = eval(normalize(x), globalEnv); - def evaluate(s: String): Data = evaluate(string2lisp(s)); - - def string2lisp(s: String): Data = { - val it = new LispTokenizer(s); - def parseExpr(token: String): Data = { - if (token == "(") parseList - else if (token == ")") sys.error("unbalanced parentheses") - else if ('0' <= token.charAt(0) && token.charAt(0) <= '9') - NUM(token.toInt) - else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') - STR(token.substring(1,token.length() - 1)) - else SYM(token) - } - def parseList: Data = { - val token = it.next; - if (token == ")") NIL() else CONS(parseExpr(token), parseList) - } - parseExpr(it.next) - } - - def lisp2string(d: Data): String = d.toString(); -} - -//############################################################################ -// Lisp Implementation Using Any - -object LispAny extends Lisp 
{ - - import List._; - - type Data = Any; - - case class Lambda(f: List[Data] => Data); - - var curexp: Data = null; - var trace: Boolean = false; - var indent: Int = 0; - - def lispError[a](msg: String): a = - sys.error("error: " + msg + "\n" + curexp); - - trait Environment { - def lookup(n: String): Data; - def extendRec(name: String, expr: Environment => Data) = - new Environment { - def lookup(n: String): Data = - if (n == name) expr(this) else Environment.this.lookup(n); - } - def extend(name: String, v: Data) = extendRec(name, (env1 => v)); - } - val EmptyEnvironment = new Environment { - def lookup(n: String): Data = lispError("undefined: " + n); - } - - def asList(x: Data): List[Data] = x match { - case y: List[_] => y - case _ => lispError("malformed list: " + x) - } - - def asInt(x: Data): Int = x match { - case y: Int => y - case _ => lispError("not an integer: " + x) - } - - def asString(x: Data): String = x match { - case y: String => y - case _ => lispError("not a string: " + x) - } - - def asBoolean(x: Data): Boolean = x != 0 - - def normalize(x: Data): Data = x match { - case 'and :: x :: y :: Nil => - normalize('if :: x :: y :: 0 :: Nil) - case 'or :: x :: y :: Nil => - normalize('if :: x :: 1 :: y :: Nil) - case 'def :: (name :: args) :: body :: expr :: Nil => - normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil) - case 'cond :: ('else :: expr :: Nil) :: rest => - normalize(expr); - case 'cond :: (test :: expr :: Nil) :: rest => - normalize('if :: test :: expr :: ('cond :: rest) :: Nil) - case 'cond :: 'else :: expr :: Nil => - normalize(expr) - case h :: t => - normalize(h) :: asList(normalize(t)) - case _ => - x - } - - def eval(x: Data, env: Environment): Data = { - val prevexp = curexp; - curexp = x; - if (trace) { - for (x <- range(1, indent)) Console.print(" "); - Console.println("===> " + x); - indent += 1; - } - val result = eval1(x, env); - if (trace) { - indent -= 1; - for (x <- range(1, indent)) Console.print(" "); - Console.println("<=== " + result); - } - curexp = prevexp; - result - } - - def eval1(x: Data, env: Environment): Data = x match { - case Symbol(name) => - env lookup name - case 'def :: Symbol(name) :: y :: z :: Nil => - eval(z, env.extendRec(name, (env1 => eval(y, env1)))) - case 'val :: Symbol(name) :: y :: z :: Nil => - eval(z, env.extend(name, eval(y, env))) - case 'lambda :: params :: y :: Nil => - mkLambda(params, y, env) - case 'if :: c :: y :: z :: Nil => - if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env) - case 'quote :: y :: Nil => - y - case y :: z => - apply(eval(y, env), z map (x => eval(x, env))) - case Lambda(_) => x - case y: String => x - case y: Int => x - case y => lispError("illegal term") - } - - def lisp2string(x: Data): String = x match { - case Symbol(name) => name - case Nil => "()" - case y :: ys => - def list2string(xs: List[Data]): String = xs match { - case List() => "" - case y :: ys => " " + lisp2string(y) + list2string(ys) - } - "(" + lisp2string(y) + list2string(ys) + ")" - case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString() - } - - def apply(fn: Data, args: List[Data]): Data = fn match { - case Lambda(f) => f(args); - case _ => lispError("application of non-function: " + fn + " to " + args); - } - - def mkLambda(params: Data, expr: Data, env: Environment): Data = { - - def extendEnv(env: Environment, - ps: List[String], args: List[Data]): Environment = - Pair(ps, args) match { - case Pair(List(), List()) => - env - case Pair(p :: ps1, arg :: args1) => - 
extendEnv(env.extend(p, arg), ps1, args1) - case _ => - lispError("wrong number of arguments") - } - - val ps: List[String] = asList(params) map { - case Symbol(name) => name - case _ => sys.error("illegal parameter list"); - } - - Lambda(args => eval(expr, extendEnv(env, ps, args))) - } - - val globalEnv = EmptyEnvironment - .extend("=", Lambda{ - case List(arg1, arg2) => if(arg1 == arg2) 1 else 0}) - .extend("+", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 + arg2 - case List(arg1: String, arg2: String) => arg1 + arg2}) - .extend("-", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 - arg2}) - .extend("*", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 * arg2}) - .extend("/", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 / arg2}) - .extend("nil", Nil) - .extend("cons", Lambda{ - case List(arg1, arg2) => arg1 :: asList(arg2)}) - .extend("car", Lambda{ - case List(x :: xs) => x}) - .extend("cdr", Lambda{ - case List(x :: xs) => xs}) - .extend("null?", Lambda{ - case List(Nil) => 1 - case _ => 0}); - - def evaluate(x: Data): Data = eval(normalize(x), globalEnv); - def evaluate(s: String): Data = evaluate(string2lisp(s)); - - def string2lisp(s: String): Data = { - val it = new LispTokenizer(s); - def parseExpr(token: String): Data = { - if (token == "(") parseList - else if (token == ")") sys.error("unbalanced parentheses") - //else if (Character.isDigit(token.charAt(0))) - else if (token.charAt(0).isDigit) - token.toInt - else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') - token.substring(1,token.length() - 1) - else Symbol(token) - } - def parseList: List[Data] = { - val token = it.next; - if (token == ")") Nil else parseExpr(token) :: parseList - } - parseExpr(it.next) - } -} - -//############################################################################ -// List User - -class LispUser(lisp: Lisp) { - - import lisp._; - - def evaluate(s: String) = lisp2string(lisp.evaluate(s)); - - def run = { - - Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]); - Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))"))); - Console.println; - - Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))")); - Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))")); - Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))")); - Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))")); - Console.println("(null? '()) = " + evaluate("(null? 
(quote()))")); - Console.println; - - Console.println("faculty(10) = " + evaluate( - "(def (faculty n) " + - "(if (= n 0) " + - "1 " + - "(* n (faculty (- n 1)))) " + - "(faculty 10))")); - Console.println("faculty(10) = " + evaluate( - "(def (faculty n) " + - "(cond " + - "((= n 0) 1) " + - "(else (* n (faculty (- n 1))))) " + - "(faculty 10))")); - Console.println("foobar = " + evaluate( - "(def (foo n) " + - "(cond " + - "((= n 0) \"a\")" + - "((= n 1) \"b\")" + - "((= (/ n 2) 1) " + - "(cond " + - "((= n 2) \"c\")" + - "(else \"d\")))" + - "(else " + - "(def (bar m) " + - "(cond " + - "((= m 0) \"e\")" + - "((= m 1) \"f\")" + - "(else \"z\"))" + - "(bar (- n 4)))))" + - "(val nil (quote ())" + - "(val v1 (foo 0) " + - "(val v2 (+ (foo 1) (foo 2)) " + - "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " + - "(val v4 (foo 6) " + - "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))")); - Console.println; - } -} - -//############################################################################ -// Main - -object Test { - def main(args: Array[String]) { - new LispUser(LispCaseClasses).run; - new LispUser(LispAny).run; - () - } -} - -//############################################################################ diff --git a/test/files/run/lisp.check b/test/files/run/lisp.check new file mode 100644 index 0000000000..38ca7b655d --- /dev/null +++ b/test/files/run/lisp.check @@ -0,0 +1,25 @@ +(lambda (x) (+ (* x x) 1)) +(lambda (x) (+ (* x x) 1)) + +( '(1 2 3)) = (1 2 3) +(car '(1 2 3)) = 1 +(cdr '(1 2 3)) = (2 3) +(null? '(2 3)) = 0 +(null? '()) = 1 + +faculty(10) = 3628800 +faculty(10) = 3628800 +foobar = ("a" "bc" "def" "z") + +List('lambda, List('x), List('+, List('*, 'x, 'x), 1)) +(lambda (x) (+ (* x x) 1)) + +( '(1 2 3)) = (1 2 3) +(car '(1 2 3)) = 1 +(cdr '(1 2 3)) = (2 3) +(null? '(2 3)) = 0 +(null? '()) = 1 + +faculty(10) = 3628800 +faculty(10) = 3628800 +foobar = ("a" "bc" "def" "z") diff --git a/test/files/run/lisp.scala b/test/files/run/lisp.scala new file mode 100644 index 0000000000..07f44e1151 --- /dev/null +++ b/test/files/run/lisp.scala @@ -0,0 +1,518 @@ +//############################################################################ +// Lisp interpreter +//############################################################################ + +//############################################################################ +// Lisp Scanner + +class LispTokenizer(s: String) extends Iterator[String] { + private var i = 0; + private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')' + def hasNext: Boolean = { + while (i < s.length() && s.charAt(i) <= ' ') i += 1 + i < s.length() + } + def next: String = + if (hasNext) { + val start = i + if (isDelimiter(s charAt i)) i += 1 + else + do i = i + 1 + while (!isDelimiter(s charAt i)) + s.substring(start, i) + } else sys.error("premature end of string") +} + +//############################################################################ +// Lisp Interface + +trait Lisp { + type Data + + def string2lisp(s: String): Data + def lisp2string(s: Data): String + + def evaluate(d: Data): Data + // !!! 
def evaluate(s: String): Data = evaluate(string2lisp(s)) + def evaluate(s: String): Data +} + +//############################################################################ +// Lisp Implementation Using Case Classes + +object LispCaseClasses extends Lisp { + + import List.range + + trait Data { + def elemsToString(): String = toString(); + } + case class CONS(car: Data, cdr: Data) extends Data { + override def toString() = "(" + elemsToString() + ")"; + override def elemsToString() = car.toString() + (cdr match { + case NIL() => "" + case _ => " " + cdr.elemsToString(); + }) + } + case class NIL() extends Data { // !!! use case object + override def toString() = "()"; + } + case class SYM(name: String) extends Data { + override def toString() = name; + } + case class NUM(x: Int) extends Data { + override def toString() = x.toString(); + } + case class STR(x: String) extends Data { + override def toString() = "\"" + x + "\""; + } + case class FUN(f: List[Data] => Data) extends Data { + override def toString() = ""; + } + + def list(): Data = + NIL(); + def list(x0: Data): Data = + CONS(x0, NIL()); + def list(x0: Data, x1: Data): Data = + CONS(x0, list(x1)); + def list(x0: Data, x1: Data, x2: Data): Data = + CONS(x0, list(x1, x2)); + def list(x0: Data, x1: Data, x2: Data, x3: Data): Data = + CONS(x0, list(x1, x2, x3)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data = + CONS(x0, list(x1, x2, x3, x4)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data, x8: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data, x8: Data, x9: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9)); + + var curexp: Data = null + var trace: Boolean = false + var indent: Int = 0 + + def lispError[a](msg: String): a = + sys.error("error: " + msg + "\n" + curexp); + + trait Environment { + def lookup(n: String): Data; + def extendRec(name: String, expr: Environment => Data) = + new Environment { + def lookup(n: String): Data = + if (n == name) expr(this) else Environment.this.lookup(n); + } + def extend(name: String, v: Data) = extendRec(name, (env1 => v)); + } + val EmptyEnvironment = new Environment { + def lookup(n: String): Data = lispError("undefined: " + n); + } + + def toList(x: Data): List[Data] = x match { + case NIL() => List() + case CONS(y, ys) => y :: toList(ys) + case _ => lispError("malformed list: " + x); + } + + def toBoolean(x: Data) = x match { + case NUM(0) => false + case _ => true + } + + def normalize(x: Data): Data = x match { + case CONS(SYM("def"), + CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) => + normalize(list(SYM("def"), + SYM(name), list(SYM("lambda"), args, body), expr)) + case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) => + normalize(expr) + case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) => + normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest))) + case CONS(h, t) => CONS(normalize(h), normalize(t)) + case _ => x + } + + def eval(x: Data, env: 
Environment): Data = { + val prevexp = curexp; + curexp = x; + if (trace) { + for (x <- range(1, indent)) Console.print(" "); + Console.println("===> " + x); + indent = indent + 1; + } + val result = eval1(x, env); + if (trace) { + indent = indent - 1; + for (x <- range(1, indent)) Console.print(" "); + Console.println("<=== " + result); + } + curexp = prevexp; + result + } + + def eval1(x: Data, env: Environment): Data = x match { + case SYM(name) => + env lookup name + case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => + eval(z, env.extendRec(name, (env1 => eval(y, env1)))) + case CONS(SYM("val"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => + eval(z, env.extend(name, eval(y, env))) + case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) => + mkLambda(params, y, env) + case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) => + if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env) + case CONS(SYM("quote"), CONS(x, NIL())) => + x + case CONS(y, xs) => + apply(eval(y, env), toList(xs) map (x => eval(x, env))) + case NUM(_) => x + case STR(_) => x + case FUN(_) => x + case _ => + lispError("illegal term") + } + + def apply(fn: Data, args: List[Data]): Data = fn match { + case FUN(f) => f(args); + case _ => lispError("application of non-function: " + fn); + } + + def mkLambda(params: Data, expr: Data, env: Environment): Data = { + + def extendEnv(env: Environment, + ps: List[String], args: List[Data]): Environment = + Pair(ps, args) match { + case Pair(List(), List()) => + env + case Pair(p :: ps1, arg :: args1) => + extendEnv(env.extend(p, arg), ps1, args1) + case _ => + lispError("wrong number of arguments") + } + + val ps: List[String] = toList(params) map { + case SYM(name) => name + case _ => sys.error("illegal parameter list"); + } + + FUN(args => eval(expr, extendEnv(env, ps, args))) + } + + val globalEnv = EmptyEnvironment + .extend("=", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0) + case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)})) + .extend("+", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2) + case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)})) + .extend("-", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)})) + .extend("*", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)})) + .extend("/", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)})) + .extend("car", FUN({ + case List(CONS(x, xs)) => x})) + .extend("cdr", FUN({ + case List(CONS(x, xs)) => xs})) + .extend("null?", FUN({ + case List(NIL()) => NUM(1) + case _ => NUM(0)})) + .extend("cons", FUN({ + case List(x, y) => CONS(x, y)})); + + def evaluate(x: Data): Data = eval(normalize(x), globalEnv); + def evaluate(s: String): Data = evaluate(string2lisp(s)); + + def string2lisp(s: String): Data = { + val it = new LispTokenizer(s); + def parseExpr(token: String): Data = { + if (token == "(") parseList + else if (token == ")") sys.error("unbalanced parentheses") + else if ('0' <= token.charAt(0) && token.charAt(0) <= '9') + NUM(token.toInt) + else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') + STR(token.substring(1,token.length() - 1)) + else SYM(token) + } + def parseList: Data = { + val token = it.next; + if (token == ")") NIL() else CONS(parseExpr(token), parseList) + } + parseExpr(it.next) + } + + def lisp2string(d: Data): String = d.toString(); +} + +//############################################################################ +// Lisp Implementation Using Any + +object LispAny extends Lisp 
{ + + import List._; + + type Data = Any; + + case class Lambda(f: List[Data] => Data); + + var curexp: Data = null; + var trace: Boolean = false; + var indent: Int = 0; + + def lispError[a](msg: String): a = + sys.error("error: " + msg + "\n" + curexp); + + trait Environment { + def lookup(n: String): Data; + def extendRec(name: String, expr: Environment => Data) = + new Environment { + def lookup(n: String): Data = + if (n == name) expr(this) else Environment.this.lookup(n); + } + def extend(name: String, v: Data) = extendRec(name, (env1 => v)); + } + val EmptyEnvironment = new Environment { + def lookup(n: String): Data = lispError("undefined: " + n); + } + + def asList(x: Data): List[Data] = x match { + case y: List[_] => y + case _ => lispError("malformed list: " + x) + } + + def asInt(x: Data): Int = x match { + case y: Int => y + case _ => lispError("not an integer: " + x) + } + + def asString(x: Data): String = x match { + case y: String => y + case _ => lispError("not a string: " + x) + } + + def asBoolean(x: Data): Boolean = x != 0 + + def normalize(x: Data): Data = x match { + case 'and :: x :: y :: Nil => + normalize('if :: x :: y :: 0 :: Nil) + case 'or :: x :: y :: Nil => + normalize('if :: x :: 1 :: y :: Nil) + case 'def :: (name :: args) :: body :: expr :: Nil => + normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil) + case 'cond :: ('else :: expr :: Nil) :: rest => + normalize(expr); + case 'cond :: (test :: expr :: Nil) :: rest => + normalize('if :: test :: expr :: ('cond :: rest) :: Nil) + case 'cond :: 'else :: expr :: Nil => + normalize(expr) + case h :: t => + normalize(h) :: asList(normalize(t)) + case _ => + x + } + + def eval(x: Data, env: Environment): Data = { + val prevexp = curexp; + curexp = x; + if (trace) { + for (x <- range(1, indent)) Console.print(" "); + Console.println("===> " + x); + indent += 1; + } + val result = eval1(x, env); + if (trace) { + indent -= 1; + for (x <- range(1, indent)) Console.print(" "); + Console.println("<=== " + result); + } + curexp = prevexp; + result + } + + def eval1(x: Data, env: Environment): Data = x match { + case Symbol(name) => + env lookup name + case 'def :: Symbol(name) :: y :: z :: Nil => + eval(z, env.extendRec(name, (env1 => eval(y, env1)))) + case 'val :: Symbol(name) :: y :: z :: Nil => + eval(z, env.extend(name, eval(y, env))) + case 'lambda :: params :: y :: Nil => + mkLambda(params, y, env) + case 'if :: c :: y :: z :: Nil => + if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env) + case 'quote :: y :: Nil => + y + case y :: z => + apply(eval(y, env), z map (x => eval(x, env))) + case Lambda(_) => x + case y: String => x + case y: Int => x + case y => lispError("illegal term") + } + + def lisp2string(x: Data): String = x match { + case Symbol(name) => name + case Nil => "()" + case y :: ys => + def list2string(xs: List[Data]): String = xs match { + case List() => "" + case y :: ys => " " + lisp2string(y) + list2string(ys) + } + "(" + lisp2string(y) + list2string(ys) + ")" + case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString() + } + + def apply(fn: Data, args: List[Data]): Data = fn match { + case Lambda(f) => f(args); + case _ => lispError("application of non-function: " + fn + " to " + args); + } + + def mkLambda(params: Data, expr: Data, env: Environment): Data = { + + def extendEnv(env: Environment, + ps: List[String], args: List[Data]): Environment = + Pair(ps, args) match { + case Pair(List(), List()) => + env + case Pair(p :: ps1, arg :: args1) => + 
extendEnv(env.extend(p, arg), ps1, args1) + case _ => + lispError("wrong number of arguments") + } + + val ps: List[String] = asList(params) map { + case Symbol(name) => name + case _ => sys.error("illegal parameter list"); + } + + Lambda(args => eval(expr, extendEnv(env, ps, args))) + } + + val globalEnv = EmptyEnvironment + .extend("=", Lambda{ + case List(arg1, arg2) => if(arg1 == arg2) 1 else 0}) + .extend("+", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 + arg2 + case List(arg1: String, arg2: String) => arg1 + arg2}) + .extend("-", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 - arg2}) + .extend("*", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 * arg2}) + .extend("/", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 / arg2}) + .extend("nil", Nil) + .extend("cons", Lambda{ + case List(arg1, arg2) => arg1 :: asList(arg2)}) + .extend("car", Lambda{ + case List(x :: xs) => x}) + .extend("cdr", Lambda{ + case List(x :: xs) => xs}) + .extend("null?", Lambda{ + case List(Nil) => 1 + case _ => 0}); + + def evaluate(x: Data): Data = eval(normalize(x), globalEnv); + def evaluate(s: String): Data = evaluate(string2lisp(s)); + + def string2lisp(s: String): Data = { + val it = new LispTokenizer(s); + def parseExpr(token: String): Data = { + if (token == "(") parseList + else if (token == ")") sys.error("unbalanced parentheses") + //else if (Character.isDigit(token.charAt(0))) + else if (token.charAt(0).isDigit) + token.toInt + else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') + token.substring(1,token.length() - 1) + else Symbol(token) + } + def parseList: List[Data] = { + val token = it.next; + if (token == ")") Nil else parseExpr(token) :: parseList + } + parseExpr(it.next) + } +} + +//############################################################################ +// List User + +class LispUser(lisp: Lisp) { + + import lisp._; + + def evaluate(s: String) = lisp2string(lisp.evaluate(s)); + + def run = { + + Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]); + Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))"))); + Console.println; + + Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))")); + Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))")); + Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))")); + Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))")); + Console.println("(null? '()) = " + evaluate("(null? 
(quote()))")); + Console.println; + + Console.println("faculty(10) = " + evaluate( + "(def (faculty n) " + + "(if (= n 0) " + + "1 " + + "(* n (faculty (- n 1)))) " + + "(faculty 10))")); + Console.println("faculty(10) = " + evaluate( + "(def (faculty n) " + + "(cond " + + "((= n 0) 1) " + + "(else (* n (faculty (- n 1))))) " + + "(faculty 10))")); + Console.println("foobar = " + evaluate( + "(def (foo n) " + + "(cond " + + "((= n 0) \"a\")" + + "((= n 1) \"b\")" + + "((= (/ n 2) 1) " + + "(cond " + + "((= n 2) \"c\")" + + "(else \"d\")))" + + "(else " + + "(def (bar m) " + + "(cond " + + "((= m 0) \"e\")" + + "((= m 1) \"f\")" + + "(else \"z\"))" + + "(bar (- n 4)))))" + + "(val nil (quote ())" + + "(val v1 (foo 0) " + + "(val v2 (+ (foo 1) (foo 2)) " + + "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " + + "(val v4 (foo 6) " + + "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))")); + Console.println; + } +} + +//############################################################################ +// Main + +object Test { + def main(args: Array[String]) { + new LispUser(LispCaseClasses).run; + new LispUser(LispAny).run; + () + } +} + +//############################################################################ -- cgit v1.2.3 From 399bd6240f775583ee9709311bd0b02e8359c15c Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Tue, 20 Mar 2012 10:35:10 -0700 Subject: Never write final fields outside of constructors. Closes SI-3569, SI-3770. Also threw in experimental -Yoverride-vars. It's not robust. --- src/compiler/scala/reflect/internal/Flags.scala | 2 +- .../scala/tools/nsc/backend/jvm/GenJVM.scala | 34 ++++++++++++++------ .../scala/tools/nsc/settings/ScalaSettings.scala | 3 +- .../scala/tools/nsc/typechecker/Namers.scala | 5 +-- .../scala/tools/nsc/typechecker/RefChecks.scala | 15 ++++++--- .../tools/nsc/typechecker/SuperAccessors.scala | 3 +- test/files/run/finalvar.check | 6 ++++ test/files/run/finalvar.flags | 1 + test/files/run/finalvar.scala | 37 ++++++++++++++++++++++ 9 files changed, 86 insertions(+), 20 deletions(-) create mode 100644 test/files/run/finalvar.check create mode 100644 test/files/run/finalvar.flags create mode 100644 test/files/run/finalvar.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Flags.scala b/src/compiler/scala/reflect/internal/Flags.scala index 3110d73461..ce1c8d0908 100644 --- a/src/compiler/scala/reflect/internal/Flags.scala +++ b/src/compiler/scala/reflect/internal/Flags.scala @@ -84,7 +84,7 @@ import scala.collection.{ mutable, immutable } */ class ModifierFlags { final val IMPLICIT = 0x00000200 - final val FINAL = 0x00000020 + final val FINAL = 0x00000020 // May not be overridden. Note that java final implies much more than scala final. 
 final val PRIVATE = 0x00000004
 final val PROTECTED = 0x00000001
diff --git a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
index 8e326c202a..b7b4212b93 100644
--- a/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
+++ b/src/compiler/scala/tools/nsc/backend/jvm/GenJVM.scala
@@ -871,7 +871,7 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
       debuglog("Adding field: " + f.symbol.fullName)

       val jfield = jclass.addNewField(
-        javaFlags(f.symbol) | javaFieldFlags(f.symbol),
+        javaFieldFlags(f.symbol),
         javaName(f.symbol),
         javaType(f.symbol.tpe)
       )
@@ -1915,16 +1915,30 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
     val privateFlag =
       sym.isPrivate || (sym.isPrimaryConstructor && isTopLevelModule(sym.owner))

-    // This does not check .isFinal (which checks flags for the FINAL flag),
-    // instead checking rawflags for that flag so as to exclude symbols which
-    // received lateFINAL. These symbols are eligible for inlining, but to
-    // avoid breaking proxy software which depends on subclassing, we avoid
-    // insisting on their finality in the bytecode.
+    // Final: the only fields which can receive ACC_FINAL are eager vals.
+    // Neither vars nor lazy vals can, because:
+    //
+    // Source: http://docs.oracle.com/javase/specs/jls/se7/html/jls-17.html#jls-17.5.3
+    // "Another problem is that the specification allows aggressive
+    // optimization of final fields. Within a thread, it is permissible to
+    // reorder reads of a final field with those modifications of a final
+    // field that do not take place in the constructor."
+    //
+    // A var or lazy val which is marked final still has meaning to the
+    // scala compiler. The word final is heavily overloaded unfortunately;
+    // for us it means "not overridable". At present you can't override
+    // vars regardless; this may change.
+    //
+    // The logic does not check .isFinal (which checks flags for the FINAL flag,
+    // and includes symbols marked lateFINAL) instead inspecting rawflags so
+    // we can exclude lateFINAL. Such symbols are eligible for inlining, but to
+    // avoid breaking proxy software which depends on subclassing, we do not
+    // emit ACC_FINAL.
     val finalFlag = (
          ((sym.rawflags & (Flags.FINAL | Flags.MODULE)) != 0)
       && !sym.enclClass.isInterface
       && !sym.isClassConstructor
-      && !sym.isMutable // fix for SI-3569, it is too broad?
+      && !sym.isMutable // lazy vals and vars both
     )

     mkFlags(
@@ -1939,13 +1953,13 @@ abstract class GenJVM extends SubComponent with GenJVMUtil with GenAndroid with
       if (sym.hasFlag(Flags.SYNCHRONIZED)) JAVA_ACC_SYNCHRONIZED else 0
     )
   }
-  def javaFieldFlags(sym: Symbol) = {
-    mkFlags(
+  def javaFieldFlags(sym: Symbol) = (
+    javaFlags(sym) | mkFlags(
       if (sym hasAnnotation TransientAttr) ACC_TRANSIENT else 0,
       if (sym hasAnnotation VolatileAttr) ACC_VOLATILE else 0,
       if (sym.isMutable) 0 else ACC_FINAL
     )
-  }
+  )

   def isTopLevelModule(sym: Symbol): Boolean =
     afterPickler { sym.isModuleClass && !sym.isImplClass && !sym.isNestedClass }
diff --git a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
index fdde8f9990..14b3bcc8ce 100644
--- a/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
+++ b/src/compiler/scala/tools/nsc/settings/ScalaSettings.scala
@@ -31,7 +31,7 @@ trait ScalaSettings extends AbsScalaSettings
   protected def defaultClasspath = sys.env.getOrElse("CLASSPATH", ".")

   /** Enabled under -Xexperimental. */
-  protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects)
+  protected def experimentalSettings = List[BooleanSetting](YmethodInfer, overrideObjects, overrideVars)

   /** Enabled under -Xfuture. */
   protected def futureSettings = List[BooleanSetting]()
@@ -117,6 +117,7 @@ trait ScalaSettings extends AbsScalaSettings
    * -Y "Private" settings
    */
   val overrideObjects = BooleanSetting ("-Yoverride-objects", "Allow member objects to be overridden.")
+  val overrideVars = BooleanSetting ("-Yoverride-vars", "Allow vars to be overridden.")
   val Yhelp = BooleanSetting ("-Y", "Print a synopsis of private options.")
   val browse = PhasesSetting ("-Ybrowse", "Browse the abstract syntax tree after")
   val check = PhasesSetting ("-Ycheck", "Check the tree at the end of")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Namers.scala b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
index 6b27c27652..8604366bf2 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Namers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Namers.scala
@@ -315,9 +315,10 @@ trait Namers extends MethodSynthesis {
       case DefDef(_, _, _, _, _, _) => owner.newMethod(name.toTermName, pos, flags)
       case ClassDef(_, _, _, _) => owner.newClassSymbol(name.toTypeName, pos, flags)
       case ModuleDef(_, _, _) => owner.newModule(name, pos, flags)
-      case ValDef(_, _, _, _) if isParameter => owner.newValueParameter(name, pos, flags)
       case PackageDef(pid, _) => createPackageSymbol(pos, pid)
-      case ValDef(_, _, _, _) => owner.newValue(name, pos, flags)
+      case ValDef(_, _, _, _) =>
+        if (isParameter) owner.newValueParameter(name, pos, flags)
+        else owner.newValue(name, pos, flags)
     }
   }
   private def createFieldSymbol(tree: ValDef): TermSymbol =
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 013a74da7e..9177aca656 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -394,9 +394,14 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
         overrideError("needs `override' modifier")
       } else if (other.isAbstractOverride && other.isIncompleteIn(clazz) && !member.isAbstractOverride) {
         overrideError("needs `abstract override' modifiers")
-      } else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
-        overrideError("cannot override a mutable variable")
-      } else if (member.isAnyOverride &&
+      }
+      else if (member.isAnyOverride && (other hasFlag ACCESSOR) && other.accessed.isVariable && !other.accessed.isLazy) {
+        // !?! this is not covered by the spec. We need to resolve this either by changing the spec or removing the test here.
+        // !!! is there a !?! convention? I'm !!!ing this to make sure it turns up on my searches.
+        if (!settings.overrideVars.value)
+          overrideError("cannot override a mutable variable")
+      }
+      else if (member.isAnyOverride &&
                !(member.owner.thisType.baseClasses exists (_ isSubClass other.owner)) &&
                !member.isDeferred && !other.isDeferred &&
                intersectionIsEmpty(member.extendedOverriddenSymbols, other.extendedOverriddenSymbols)) {
@@ -1248,9 +1253,9 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
        }
        List(tree1)
      }
-      case Import(_, _) => Nil
+      case Import(_, _) => Nil
      case DefDef(mods, _, _, _, _, _) if (mods hasFlag MACRO) => Nil
-      case _ => List(transform(tree))
+      case _ => List(transform(tree))
    }

    /* Check whether argument types conform to bounds of type parameters */
diff --git a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
index 4248b6f024..94733369a8 100644
--- a/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/SuperAccessors.scala
@@ -259,7 +259,8 @@ abstract class SuperAccessors extends transform.Transform with transform.TypingT
       case sel @ Select(Super(_, mix), name) =>
         if (sym.isValue && !sym.isMethod || sym.hasAccessorFlag) {
-          unit.error(tree.pos, "super may be not be used on "+ sym.accessedOrSelf)
+          if (!settings.overrideVars.value)
+            unit.error(tree.pos, "super may be not be used on "+ sym.accessedOrSelf)
         }
         else if (isDisallowed(sym)) {
           unit.error(tree.pos, "super not allowed here: use this." + name.decode + " instead")
diff --git a/test/files/run/finalvar.check b/test/files/run/finalvar.check
new file mode 100644
index 0000000000..2496293972
--- /dev/null
+++ b/test/files/run/finalvar.check
@@ -0,0 +1,6 @@
+(2,2,2,2,1)
+(2,2,2,2)
+(2,2,2,2,1001)
+(2,2,2,2)
+2
+10
diff --git a/test/files/run/finalvar.flags b/test/files/run/finalvar.flags
new file mode 100644
index 0000000000..aee3039bec
--- /dev/null
+++ b/test/files/run/finalvar.flags
@@ -0,0 +1 @@
+-Yoverride-vars -Yinline
\ No newline at end of file
diff --git a/test/files/run/finalvar.scala b/test/files/run/finalvar.scala
new file mode 100644
index 0000000000..010813e520
--- /dev/null
+++ b/test/files/run/finalvar.scala
@@ -0,0 +1,37 @@
+object Final {
+  class X(final var x: Int) { }
+  def f = new X(0).x += 1
+}
+
+class A {
+  var x = 1
+  def y0 = x
+  def y1 = this.x
+  def y2 = (this: A).x
+}
+
+class B extends A {
+  override def x = 2
+  def z = super.x
+}
+
+object Test {
+  def main(args: Array[String]): Unit = {
+    Final.f
+    val a = new B
+    println((a.x, a.y0, a.y1, a.y2, a.z))
+    val a0: A = a
+    println((a0.x, a0.y0, a0.y1, a0.y2))
+    a.x = 1001
+    println((a.x, a.y0, a.y1, a.y2, a.z))
+    println((a0.x, a0.y0, a0.y1, a0.y2))
+
+    val d = new D
+    println(d.w)
+    d.ten
+    println(d.w)
+  }
+}
+
+class C { var w = 1 ; def ten = this.w = 10 }
+class D extends C { override var w = 2 }
\ No newline at end of file
--
cgit v1.2.3


From 622cc9967376d6cef57c7478587e20d6afe9503f Mon Sep 17 00:00:00 2001
From: Paul Phillips
Date: Wed, 28 Mar 2012 17:54:19 -0700
Subject: Revert the lisp test.

The lisp test enjoys the suffering of others.
--- test/disabled/run/lisp.check | 26 +++ test/disabled/run/lisp.scala | 518 +++++++++++++++++++++++++++++++++++++++++++ test/files/run/lisp.check | 25 --- test/files/run/lisp.scala | 518 ------------------------------------------- 4 files changed, 544 insertions(+), 543 deletions(-) create mode 100644 test/disabled/run/lisp.check create mode 100644 test/disabled/run/lisp.scala delete mode 100644 test/files/run/lisp.check delete mode 100644 test/files/run/lisp.scala (limited to 'test/files') diff --git a/test/disabled/run/lisp.check b/test/disabled/run/lisp.check new file mode 100644 index 0000000000..64053f26d0 --- /dev/null +++ b/test/disabled/run/lisp.check @@ -0,0 +1,26 @@ +(lambda (x) (+ (* x x) 1)) +(lambda (x) (+ (* x x) 1)) + +( '(1 2 3)) = (1 2 3) +(car '(1 2 3)) = 1 +(cdr '(1 2 3)) = (2 3) +(null? '(2 3)) = 0 +(null? '()) = 1 + +faculty(10) = 3628800 +faculty(10) = 3628800 +foobar = ("a" "bc" "def" "z") + +List('lambda, List('x), List('+, List('*, 'x, 'x), 1)) +(lambda (x) (+ (* x x) 1)) + +( '(1 2 3)) = (1 2 3) +(car '(1 2 3)) = 1 +(cdr '(1 2 3)) = (2 3) +(null? '(2 3)) = 0 +(null? '()) = 1 + +faculty(10) = 3628800 +faculty(10) = 3628800 +foobar = ("a" "bc" "def" "z") + diff --git a/test/disabled/run/lisp.scala b/test/disabled/run/lisp.scala new file mode 100644 index 0000000000..06e68f508a --- /dev/null +++ b/test/disabled/run/lisp.scala @@ -0,0 +1,518 @@ +//############################################################################ +// Lisp interpreter +//############################################################################ + +//############################################################################ +// Lisp Scanner + +class LispTokenizer(s: String) extends Iterator[String] { + private var i = 0; + private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')' + def hasNext: Boolean = { + while (i < s.length() && s.charAt(i) <= ' ') i += 1 + i < s.length() + } + def next: String = + if (hasNext) { + val start = i + if (isDelimiter(s charAt i)) i += 1 + else + do i = i + 1 + while (!isDelimiter(s charAt i)) + s.substring(start, i) + } else sys.error("premature end of string") +} + +//############################################################################ +// Lisp Interface + +trait Lisp { + type Data + + def string2lisp(s: String): Data + def lisp2string(s: Data): String + + def evaluate(d: Data): Data + // !!! def evaluate(s: String): Data = evaluate(string2lisp(s)) + def evaluate(s: String): Data +} + +//############################################################################ +// Lisp Implementation Using Case Classes + +object LispCaseClasses extends Lisp { + + import List.range + + trait Data { + def elemsToString(): String = toString(); + } + case class CONS(car: Data, cdr: Data) extends Data { + override def toString() = "(" + elemsToString() + ")"; + override def elemsToString() = car.toString() + (cdr match { + case NIL() => "" + case _ => " " + cdr.elemsToString(); + }) + } + case class NIL() extends Data { // !!! 
use case object + override def toString() = "()"; + } + case class SYM(name: String) extends Data { + override def toString() = name; + } + case class NUM(x: Int) extends Data { + override def toString() = x.toString(); + } + case class STR(x: String) extends Data { + override def toString() = "\"" + x + "\""; + } + case class FUN(f: List[Data] => Data) extends Data { + override def toString() = ""; + } + + def list(): Data = + NIL(); + def list(x0: Data): Data = + CONS(x0, NIL()); + def list(x0: Data, x1: Data): Data = + CONS(x0, list(x1)); + def list(x0: Data, x1: Data, x2: Data): Data = + CONS(x0, list(x1, x2)); + def list(x0: Data, x1: Data, x2: Data, x3: Data): Data = + CONS(x0, list(x1, x2, x3)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data = + CONS(x0, list(x1, x2, x3, x4)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data, x8: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8)); + def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, + x6: Data, x7: Data, x8: Data, x9: Data): Data = + CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9)); + + var curexp: Data = null + var trace: Boolean = false + var indent: Int = 0 + + def lispError[a](msg: String): a = + sys.error("error: " + msg + "\n" + curexp); + + trait Environment { + def lookup(n: String): Data; + def extendRec(name: String, expr: Environment => Data) = + new Environment { + def lookup(n: String): Data = + if (n == name) expr(this) else Environment.this.lookup(n); + } + def extend(name: String, v: Data) = extendRec(name, (env1 => v)); + } + val EmptyEnvironment = new Environment { + def lookup(n: String): Data = lispError("undefined: " + n); + } + + def toList(x: Data): List[Data] = x match { + case NIL() => List() + case CONS(y, ys) => y :: toList(ys) + case _ => lispError("malformed list: " + x); + } + + def toBoolean(x: Data) = x match { + case NUM(0) => false + case _ => true + } + + def normalize(x: Data): Data = x match { + case CONS(SYM("def"), + CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) => + normalize(list(SYM("def"), + SYM(name), list(SYM("lambda"), args, body), expr)) + case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) => + normalize(expr) + case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) => + normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest))) + case CONS(h, t) => CONS(normalize(h), normalize(t)) + case _ => x + } + + def eval(x: Data, env: Environment): Data = { + val prevexp = curexp; + curexp = x; + if (trace) { + for (x <- range(1, indent)) Console.print(" "); + Console.println("===> " + x); + indent = indent + 1; + } + val result = eval1(x, env); + if (trace) { + indent = indent - 1; + for (x <- range(1, indent)) Console.print(" "); + Console.println("<=== " + result); + } + curexp = prevexp; + result + } + + def eval1(x: Data, env: Environment): Data = x match { + case SYM(name) => + env lookup name + case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => + eval(z, env.extendRec(name, (env1 => eval(y, env1)))) + case CONS(SYM("val"), CONS(SYM(name), 
CONS(y, CONS(z, NIL())))) => + eval(z, env.extend(name, eval(y, env))) + case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) => + mkLambda(params, y, env) + case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) => + if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env) + case CONS(SYM("quote"), CONS(x, NIL())) => + x + case CONS(y, xs) => + apply(eval(y, env), toList(xs) map (x => eval(x, env))) + case NUM(_) => x + case STR(_) => x + case FUN(_) => x + case _ => + lispError("illegal term") + } + + def apply(fn: Data, args: List[Data]): Data = fn match { + case FUN(f) => f(args); + case _ => lispError("application of non-function: " + fn); + } + + def mkLambda(params: Data, expr: Data, env: Environment): Data = { + + def extendEnv(env: Environment, + ps: List[String], args: List[Data]): Environment = + Pair(ps, args) match { + case Pair(List(), List()) => + env + case Pair(p :: ps1, arg :: args1) => + extendEnv(env.extend(p, arg), ps1, args1) + case _ => + lispError("wrong number of arguments") + } + + val ps: List[String] = toList(params) map { + case SYM(name) => name + case _ => sys.error("illegal parameter list"); + } + + FUN(args => eval(expr, extendEnv(env, ps, args))) + } + + val globalEnv = EmptyEnvironment + .extend("=", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0) + case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)})) + .extend("+", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2) + case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)})) + .extend("-", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)})) + .extend("*", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)})) + .extend("/", FUN({ + case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)})) + .extend("car", FUN({ + case List(CONS(x, xs)) => x})) + .extend("cdr", FUN({ + case List(CONS(x, xs)) => xs})) + .extend("null?", FUN({ + case List(NIL()) => NUM(1) + case _ => NUM(0)})) + .extend("cons", FUN({ + case List(x, y) => CONS(x, y)})); + + def evaluate(x: Data): Data = eval(normalize(x), globalEnv); + def evaluate(s: String): Data = evaluate(string2lisp(s)); + + def string2lisp(s: String): Data = { + val it = new LispTokenizer(s); + def parseExpr(token: String): Data = { + if (token == "(") parseList + else if (token == ")") sys.error("unbalanced parentheses") + else if ('0' <= token.charAt(0) && token.charAt(0) <= '9') + NUM(token.toInt) + else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') + STR(token.substring(1,token.length() - 1)) + else SYM(token) + } + def parseList: Data = { + val token = it.next; + if (token == ")") NIL() else CONS(parseExpr(token), parseList) + } + parseExpr(it.next) + } + + def lisp2string(d: Data): String = d.toString(); +} + +//############################################################################ +// Lisp Implementation Using Any + +object LispAny extends Lisp { + + import List._; + + type Data = Any; + + case class Lambda(f: List[Data] => Data); + + var curexp: Data = null; + var trace: Boolean = false; + var indent: Int = 0; + + def lispError[a](msg: String): a = + sys.error("error: " + msg + "\n" + curexp); + + trait Environment { + def lookup(n: String): Data; + def extendRec(name: String, expr: Environment => Data) = + new Environment { + def lookup(n: String): Data = + if (n == name) expr(this) else Environment.this.lookup(n); + } + def extend(name: String, v: Data) = extendRec(name, (env1 => v)); + } + val EmptyEnvironment = new Environment { + def lookup(n: String): Data = 
lispError("undefined: " + n); + } + + def asList(x: Data): List[Data] = x match { + case y: List[_] => y + case _ => lispError("malformed list: " + x) + } + + def asInt(x: Data): Int = x match { + case y: Int => y + case _ => lispError("not an integer: " + x) + } + + def asString(x: Data): String = x match { + case y: String => y + case _ => lispError("not a string: " + x) + } + + def asBoolean(x: Data): Boolean = x != 0 + + def normalize(x: Data): Data = x match { + case 'and :: x :: y :: Nil => + normalize('if :: x :: y :: 0 :: Nil) + case 'or :: x :: y :: Nil => + normalize('if :: x :: 1 :: y :: Nil) + case 'def :: (name :: args) :: body :: expr :: Nil => + normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil) + case 'cond :: ('else :: expr :: Nil) :: rest => + normalize(expr); + case 'cond :: (test :: expr :: Nil) :: rest => + normalize('if :: test :: expr :: ('cond :: rest) :: Nil) + case 'cond :: 'else :: expr :: Nil => + normalize(expr) + case h :: t => + normalize(h) :: asList(normalize(t)) + case _ => + x + } + + def eval(x: Data, env: Environment): Data = { + val prevexp = curexp; + curexp = x; + if (trace) { + for (x <- range(1, indent)) Console.print(" "); + Console.println("===> " + x); + indent += 1; + } + val result = eval1(x, env); + if (trace) { + indent -= 1; + for (x <- range(1, indent)) Console.print(" "); + Console.println("<=== " + result); + } + curexp = prevexp; + result + } + + def eval1(x: Data, env: Environment): Data = x match { + case Symbol(name) => + env lookup name + case 'def :: Symbol(name) :: y :: z :: Nil => + eval(z, env.extendRec(name, (env1 => eval(y, env1)))) + case 'val :: Symbol(name) :: y :: z :: Nil => + eval(z, env.extend(name, eval(y, env))) + case 'lambda :: params :: y :: Nil => + mkLambda(params, y, env) + case 'if :: c :: y :: z :: Nil => + if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env) + case 'quote :: y :: Nil => + y + case y :: z => + apply(eval(y, env), z map (x => eval(x, env))) + case Lambda(_) => x + case y: String => x + case y: Int => x + case y => lispError("illegal term") + } + + def lisp2string(x: Data): String = x match { + case Symbol(name) => name + case Nil => "()" + case y :: ys => + def list2string(xs: List[Data]): String = xs match { + case List() => "" + case y :: ys => " " + lisp2string(y) + list2string(ys) + } + "(" + lisp2string(y) + list2string(ys) + ")" + case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString() + } + + def apply(fn: Data, args: List[Data]): Data = fn match { + case Lambda(f) => f(args); + case _ => lispError("application of non-function: " + fn + " to " + args); + } + + def mkLambda(params: Data, expr: Data, env: Environment): Data = { + + def extendEnv(env: Environment, + ps: List[String], args: List[Data]): Environment = + Pair(ps, args) match { + case Pair(List(), List()) => + env + case Pair(p :: ps1, arg :: args1) => + extendEnv(env.extend(p, arg), ps1, args1) + case _ => + lispError("wrong number of arguments") + } + + val ps: List[String] = asList(params) map { + case Symbol(name) => name + case _ => sys.error("illegal parameter list"); + } + + Lambda(args => eval(expr, extendEnv(env, ps, args))) + } + + val globalEnv = EmptyEnvironment + .extend("=", Lambda{ + case List(arg1, arg2) => if(arg1 == arg2) 1 else 0}) + .extend("+", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 + arg2 + case List(arg1: String, arg2: String) => arg1 + arg2}) + .extend("-", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 - arg2}) + .extend("*", Lambda{ + case 
List(arg1: Int, arg2: Int) => arg1 * arg2}) + .extend("/", Lambda{ + case List(arg1: Int, arg2: Int) => arg1 / arg2}) + .extend("nil", Nil) + .extend("cons", Lambda{ + case List(arg1, arg2) => arg1 :: asList(arg2)}) + .extend("car", Lambda{ + case List(x :: xs) => x}) + .extend("cdr", Lambda{ + case List(x :: xs) => xs}) + .extend("null?", Lambda{ + case List(Nil) => 1 + case _ => 0}); + + def evaluate(x: Data): Data = eval(normalize(x), globalEnv); + def evaluate(s: String): Data = evaluate(string2lisp(s)); + + def string2lisp(s: String): Data = { + val it = new LispTokenizer(s); + def parseExpr(token: String): Data = { + if (token == "(") parseList + else if (token == ")") sys.error("unbalanced parentheses") + //else if (Character.isDigit(token.charAt(0))) + else if (token.charAt(0).isDigit) + token.toInt + else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') + token.substring(1,token.length() - 1) + else Symbol(token) + } + def parseList: List[Data] = { + val token = it.next; + if (token == ")") Nil else parseExpr(token) :: parseList + } + parseExpr(it.next) + } +} + +//############################################################################ +// List User + +class LispUser(lisp: Lisp) { + + import lisp._; + + def evaluate(s: String) = lisp2string(lisp.evaluate(s)); + + def run = { + + Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]); + Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))"))); + Console.println; + + Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))")); + Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))")); + Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))")); + Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))")); + Console.println("(null? '()) = " + evaluate("(null? (quote()))")); + Console.println; + + Console.println("faculty(10) = " + evaluate( + "(def (faculty n) " + + "(if (= n 0) " + + "1 " + + "(* n (faculty (- n 1)))) " + + "(faculty 10))")); + Console.println("faculty(10) = " + evaluate( + "(def (faculty n) " + + "(cond " + + "((= n 0) 1) " + + "(else (* n (faculty (- n 1))))) " + + "(faculty 10))")); + Console.println("foobar = " + evaluate( + "(def (foo n) " + + "(cond " + + "((= n 0) \"a\")" + + "((= n 1) \"b\")" + + "((= (/ n 2) 1) " + + "(cond " + + "((= n 2) \"c\")" + + "(else \"d\")))" + + "(else " + + "(def (bar m) " + + "(cond " + + "((= m 0) \"e\")" + + "((= m 1) \"f\")" + + "(else \"z\"))" + + "(bar (- n 4)))))" + + "(val nil (quote ())" + + "(val v1 (foo 0) " + + "(val v2 (+ (foo 1) (foo 2)) " + + "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " + + "(val v4 (foo 6) " + + "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))")); + Console.println; + } +} + +//############################################################################ +// Main + +object Test { + def main(args: Array[String]) { + new LispUser(LispCaseClasses).run; + new LispUser(LispAny).run; + () + } +} + +//############################################################################ diff --git a/test/files/run/lisp.check b/test/files/run/lisp.check deleted file mode 100644 index 38ca7b655d..0000000000 --- a/test/files/run/lisp.check +++ /dev/null @@ -1,25 +0,0 @@ -(lambda (x) (+ (* x x) 1)) -(lambda (x) (+ (* x x) 1)) - -( '(1 2 3)) = (1 2 3) -(car '(1 2 3)) = 1 -(cdr '(1 2 3)) = (2 3) -(null? '(2 3)) = 0 -(null? 
'()) = 1 - -faculty(10) = 3628800 -faculty(10) = 3628800 -foobar = ("a" "bc" "def" "z") - -List('lambda, List('x), List('+, List('*, 'x, 'x), 1)) -(lambda (x) (+ (* x x) 1)) - -( '(1 2 3)) = (1 2 3) -(car '(1 2 3)) = 1 -(cdr '(1 2 3)) = (2 3) -(null? '(2 3)) = 0 -(null? '()) = 1 - -faculty(10) = 3628800 -faculty(10) = 3628800 -foobar = ("a" "bc" "def" "z") diff --git a/test/files/run/lisp.scala b/test/files/run/lisp.scala deleted file mode 100644 index 07f44e1151..0000000000 --- a/test/files/run/lisp.scala +++ /dev/null @@ -1,518 +0,0 @@ -//############################################################################ -// Lisp interpreter -//############################################################################ - -//############################################################################ -// Lisp Scanner - -class LispTokenizer(s: String) extends Iterator[String] { - private var i = 0; - private def isDelimiter(ch: Char) = ch <= ' ' || ch == '(' || ch == ')' - def hasNext: Boolean = { - while (i < s.length() && s.charAt(i) <= ' ') i += 1 - i < s.length() - } - def next: String = - if (hasNext) { - val start = i - if (isDelimiter(s charAt i)) i += 1 - else - do i = i + 1 - while (!isDelimiter(s charAt i)) - s.substring(start, i) - } else sys.error("premature end of string") -} - -//############################################################################ -// Lisp Interface - -trait Lisp { - type Data - - def string2lisp(s: String): Data - def lisp2string(s: Data): String - - def evaluate(d: Data): Data - // !!! def evaluate(s: String): Data = evaluate(string2lisp(s)) - def evaluate(s: String): Data -} - -//############################################################################ -// Lisp Implementation Using Case Classes - -object LispCaseClasses extends Lisp { - - import List.range - - trait Data { - def elemsToString(): String = toString(); - } - case class CONS(car: Data, cdr: Data) extends Data { - override def toString() = "(" + elemsToString() + ")"; - override def elemsToString() = car.toString() + (cdr match { - case NIL() => "" - case _ => " " + cdr.elemsToString(); - }) - } - case class NIL() extends Data { // !!! 
use case object - override def toString() = "()"; - } - case class SYM(name: String) extends Data { - override def toString() = name; - } - case class NUM(x: Int) extends Data { - override def toString() = x.toString(); - } - case class STR(x: String) extends Data { - override def toString() = "\"" + x + "\""; - } - case class FUN(f: List[Data] => Data) extends Data { - override def toString() = ""; - } - - def list(): Data = - NIL(); - def list(x0: Data): Data = - CONS(x0, NIL()); - def list(x0: Data, x1: Data): Data = - CONS(x0, list(x1)); - def list(x0: Data, x1: Data, x2: Data): Data = - CONS(x0, list(x1, x2)); - def list(x0: Data, x1: Data, x2: Data, x3: Data): Data = - CONS(x0, list(x1, x2, x3)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data): Data = - CONS(x0, list(x1, x2, x3, x4)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data, x8: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8)); - def list(x0: Data, x1: Data, x2: Data, x3: Data, x4: Data, x5: Data, - x6: Data, x7: Data, x8: Data, x9: Data): Data = - CONS(x0, list(x1, x2, x3, x4, x5, x6, x7, x8, x9)); - - var curexp: Data = null - var trace: Boolean = false - var indent: Int = 0 - - def lispError[a](msg: String): a = - sys.error("error: " + msg + "\n" + curexp); - - trait Environment { - def lookup(n: String): Data; - def extendRec(name: String, expr: Environment => Data) = - new Environment { - def lookup(n: String): Data = - if (n == name) expr(this) else Environment.this.lookup(n); - } - def extend(name: String, v: Data) = extendRec(name, (env1 => v)); - } - val EmptyEnvironment = new Environment { - def lookup(n: String): Data = lispError("undefined: " + n); - } - - def toList(x: Data): List[Data] = x match { - case NIL() => List() - case CONS(y, ys) => y :: toList(ys) - case _ => lispError("malformed list: " + x); - } - - def toBoolean(x: Data) = x match { - case NUM(0) => false - case _ => true - } - - def normalize(x: Data): Data = x match { - case CONS(SYM("def"), - CONS(CONS(SYM(name), args), CONS(body, CONS(expr, NIL())))) => - normalize(list(SYM("def"), - SYM(name), list(SYM("lambda"), args, body), expr)) - case CONS(SYM("cond"), CONS(CONS(SYM("else"), CONS(expr, NIL())),NIL())) => - normalize(expr) - case CONS(SYM("cond"), CONS(CONS(test, CONS(expr, NIL())), rest)) => - normalize(list(SYM("if"), test, expr, CONS(SYM("cond"), rest))) - case CONS(h, t) => CONS(normalize(h), normalize(t)) - case _ => x - } - - def eval(x: Data, env: Environment): Data = { - val prevexp = curexp; - curexp = x; - if (trace) { - for (x <- range(1, indent)) Console.print(" "); - Console.println("===> " + x); - indent = indent + 1; - } - val result = eval1(x, env); - if (trace) { - indent = indent - 1; - for (x <- range(1, indent)) Console.print(" "); - Console.println("<=== " + result); - } - curexp = prevexp; - result - } - - def eval1(x: Data, env: Environment): Data = x match { - case SYM(name) => - env lookup name - case CONS(SYM("def"), CONS(SYM(name), CONS(y, CONS(z, NIL())))) => - eval(z, env.extendRec(name, (env1 => eval(y, env1)))) - case CONS(SYM("val"), CONS(SYM(name), 
CONS(y, CONS(z, NIL())))) => - eval(z, env.extend(name, eval(y, env))) - case CONS(SYM("lambda"), CONS(params, CONS(y, NIL()))) => - mkLambda(params, y, env) - case CONS(SYM("if"), CONS(c, CONS(t, CONS(e, NIL())))) => - if (toBoolean(eval(c, env))) eval(t, env) else eval(e, env) - case CONS(SYM("quote"), CONS(x, NIL())) => - x - case CONS(y, xs) => - apply(eval(y, env), toList(xs) map (x => eval(x, env))) - case NUM(_) => x - case STR(_) => x - case FUN(_) => x - case _ => - lispError("illegal term") - } - - def apply(fn: Data, args: List[Data]): Data = fn match { - case FUN(f) => f(args); - case _ => lispError("application of non-function: " + fn); - } - - def mkLambda(params: Data, expr: Data, env: Environment): Data = { - - def extendEnv(env: Environment, - ps: List[String], args: List[Data]): Environment = - Pair(ps, args) match { - case Pair(List(), List()) => - env - case Pair(p :: ps1, arg :: args1) => - extendEnv(env.extend(p, arg), ps1, args1) - case _ => - lispError("wrong number of arguments") - } - - val ps: List[String] = toList(params) map { - case SYM(name) => name - case _ => sys.error("illegal parameter list"); - } - - FUN(args => eval(expr, extendEnv(env, ps, args))) - } - - val globalEnv = EmptyEnvironment - .extend("=", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(if (arg1 == arg2) 1 else 0) - case List(STR(arg1),STR(arg2)) => NUM(if (arg1 == arg2) 1 else 0)})) - .extend("+", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 + arg2) - case List(STR(arg1),STR(arg2)) => STR(arg1 + arg2)})) - .extend("-", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 - arg2)})) - .extend("*", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 * arg2)})) - .extend("/", FUN({ - case List(NUM(arg1),NUM(arg2)) => NUM(arg1 / arg2)})) - .extend("car", FUN({ - case List(CONS(x, xs)) => x})) - .extend("cdr", FUN({ - case List(CONS(x, xs)) => xs})) - .extend("null?", FUN({ - case List(NIL()) => NUM(1) - case _ => NUM(0)})) - .extend("cons", FUN({ - case List(x, y) => CONS(x, y)})); - - def evaluate(x: Data): Data = eval(normalize(x), globalEnv); - def evaluate(s: String): Data = evaluate(string2lisp(s)); - - def string2lisp(s: String): Data = { - val it = new LispTokenizer(s); - def parseExpr(token: String): Data = { - if (token == "(") parseList - else if (token == ")") sys.error("unbalanced parentheses") - else if ('0' <= token.charAt(0) && token.charAt(0) <= '9') - NUM(token.toInt) - else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') - STR(token.substring(1,token.length() - 1)) - else SYM(token) - } - def parseList: Data = { - val token = it.next; - if (token == ")") NIL() else CONS(parseExpr(token), parseList) - } - parseExpr(it.next) - } - - def lisp2string(d: Data): String = d.toString(); -} - -//############################################################################ -// Lisp Implementation Using Any - -object LispAny extends Lisp { - - import List._; - - type Data = Any; - - case class Lambda(f: List[Data] => Data); - - var curexp: Data = null; - var trace: Boolean = false; - var indent: Int = 0; - - def lispError[a](msg: String): a = - sys.error("error: " + msg + "\n" + curexp); - - trait Environment { - def lookup(n: String): Data; - def extendRec(name: String, expr: Environment => Data) = - new Environment { - def lookup(n: String): Data = - if (n == name) expr(this) else Environment.this.lookup(n); - } - def extend(name: String, v: Data) = extendRec(name, (env1 => v)); - } - val EmptyEnvironment = new Environment { - def lookup(n: String): Data = 
lispError("undefined: " + n); - } - - def asList(x: Data): List[Data] = x match { - case y: List[_] => y - case _ => lispError("malformed list: " + x) - } - - def asInt(x: Data): Int = x match { - case y: Int => y - case _ => lispError("not an integer: " + x) - } - - def asString(x: Data): String = x match { - case y: String => y - case _ => lispError("not a string: " + x) - } - - def asBoolean(x: Data): Boolean = x != 0 - - def normalize(x: Data): Data = x match { - case 'and :: x :: y :: Nil => - normalize('if :: x :: y :: 0 :: Nil) - case 'or :: x :: y :: Nil => - normalize('if :: x :: 1 :: y :: Nil) - case 'def :: (name :: args) :: body :: expr :: Nil => - normalize('def :: name :: ('lambda :: args :: body :: Nil) :: expr :: Nil) - case 'cond :: ('else :: expr :: Nil) :: rest => - normalize(expr); - case 'cond :: (test :: expr :: Nil) :: rest => - normalize('if :: test :: expr :: ('cond :: rest) :: Nil) - case 'cond :: 'else :: expr :: Nil => - normalize(expr) - case h :: t => - normalize(h) :: asList(normalize(t)) - case _ => - x - } - - def eval(x: Data, env: Environment): Data = { - val prevexp = curexp; - curexp = x; - if (trace) { - for (x <- range(1, indent)) Console.print(" "); - Console.println("===> " + x); - indent += 1; - } - val result = eval1(x, env); - if (trace) { - indent -= 1; - for (x <- range(1, indent)) Console.print(" "); - Console.println("<=== " + result); - } - curexp = prevexp; - result - } - - def eval1(x: Data, env: Environment): Data = x match { - case Symbol(name) => - env lookup name - case 'def :: Symbol(name) :: y :: z :: Nil => - eval(z, env.extendRec(name, (env1 => eval(y, env1)))) - case 'val :: Symbol(name) :: y :: z :: Nil => - eval(z, env.extend(name, eval(y, env))) - case 'lambda :: params :: y :: Nil => - mkLambda(params, y, env) - case 'if :: c :: y :: z :: Nil => - if (asBoolean(eval(c, env))) eval(y, env) else eval(z, env) - case 'quote :: y :: Nil => - y - case y :: z => - apply(eval(y, env), z map (x => eval(x, env))) - case Lambda(_) => x - case y: String => x - case y: Int => x - case y => lispError("illegal term") - } - - def lisp2string(x: Data): String = x match { - case Symbol(name) => name - case Nil => "()" - case y :: ys => - def list2string(xs: List[Data]): String = xs match { - case List() => "" - case y :: ys => " " + lisp2string(y) + list2string(ys) - } - "(" + lisp2string(y) + list2string(ys) + ")" - case _ => if (x.isInstanceOf[String]) "\"" + x + "\""; else x.toString() - } - - def apply(fn: Data, args: List[Data]): Data = fn match { - case Lambda(f) => f(args); - case _ => lispError("application of non-function: " + fn + " to " + args); - } - - def mkLambda(params: Data, expr: Data, env: Environment): Data = { - - def extendEnv(env: Environment, - ps: List[String], args: List[Data]): Environment = - Pair(ps, args) match { - case Pair(List(), List()) => - env - case Pair(p :: ps1, arg :: args1) => - extendEnv(env.extend(p, arg), ps1, args1) - case _ => - lispError("wrong number of arguments") - } - - val ps: List[String] = asList(params) map { - case Symbol(name) => name - case _ => sys.error("illegal parameter list"); - } - - Lambda(args => eval(expr, extendEnv(env, ps, args))) - } - - val globalEnv = EmptyEnvironment - .extend("=", Lambda{ - case List(arg1, arg2) => if(arg1 == arg2) 1 else 0}) - .extend("+", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 + arg2 - case List(arg1: String, arg2: String) => arg1 + arg2}) - .extend("-", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 - arg2}) - .extend("*", Lambda{ - case 
List(arg1: Int, arg2: Int) => arg1 * arg2}) - .extend("/", Lambda{ - case List(arg1: Int, arg2: Int) => arg1 / arg2}) - .extend("nil", Nil) - .extend("cons", Lambda{ - case List(arg1, arg2) => arg1 :: asList(arg2)}) - .extend("car", Lambda{ - case List(x :: xs) => x}) - .extend("cdr", Lambda{ - case List(x :: xs) => xs}) - .extend("null?", Lambda{ - case List(Nil) => 1 - case _ => 0}); - - def evaluate(x: Data): Data = eval(normalize(x), globalEnv); - def evaluate(s: String): Data = evaluate(string2lisp(s)); - - def string2lisp(s: String): Data = { - val it = new LispTokenizer(s); - def parseExpr(token: String): Data = { - if (token == "(") parseList - else if (token == ")") sys.error("unbalanced parentheses") - //else if (Character.isDigit(token.charAt(0))) - else if (token.charAt(0).isDigit) - token.toInt - else if (token.charAt(0) == '\"' && token.charAt(token.length()-1)=='\"') - token.substring(1,token.length() - 1) - else Symbol(token) - } - def parseList: List[Data] = { - val token = it.next; - if (token == ")") Nil else parseExpr(token) :: parseList - } - parseExpr(it.next) - } -} - -//############################################################################ -// List User - -class LispUser(lisp: Lisp) { - - import lisp._; - - def evaluate(s: String) = lisp2string(lisp.evaluate(s)); - - def run = { - - Console.println(string2lisp("(lambda (x) (+ (* x x) 1))").asInstanceOf[AnyRef]); - Console.println(lisp2string(string2lisp("(lambda (x) (+ (* x x) 1))"))); - Console.println; - - Console.println("( '(1 2 3)) = " + evaluate(" (quote(1 2 3))")); - Console.println("(car '(1 2 3)) = " + evaluate("(car (quote(1 2 3)))")); - Console.println("(cdr '(1 2 3)) = " + evaluate("(cdr (quote(1 2 3)))")); - Console.println("(null? '(2 3)) = " + evaluate("(null? (quote(2 3)))")); - Console.println("(null? '()) = " + evaluate("(null? (quote()))")); - Console.println; - - Console.println("faculty(10) = " + evaluate( - "(def (faculty n) " + - "(if (= n 0) " + - "1 " + - "(* n (faculty (- n 1)))) " + - "(faculty 10))")); - Console.println("faculty(10) = " + evaluate( - "(def (faculty n) " + - "(cond " + - "((= n 0) 1) " + - "(else (* n (faculty (- n 1))))) " + - "(faculty 10))")); - Console.println("foobar = " + evaluate( - "(def (foo n) " + - "(cond " + - "((= n 0) \"a\")" + - "((= n 1) \"b\")" + - "((= (/ n 2) 1) " + - "(cond " + - "((= n 2) \"c\")" + - "(else \"d\")))" + - "(else " + - "(def (bar m) " + - "(cond " + - "((= m 0) \"e\")" + - "((= m 1) \"f\")" + - "(else \"z\"))" + - "(bar (- n 4)))))" + - "(val nil (quote ())" + - "(val v1 (foo 0) " + - "(val v2 (+ (foo 1) (foo 2)) " + - "(val v3 (+ (+ (foo 3) (foo 4)) (foo 5)) " + - "(val v4 (foo 6) " + - "(cons v1 (cons v2 (cons v3 (cons v4 nil))))))))))")); - Console.println; - } -} - -//############################################################################ -// Main - -object Test { - def main(args: Array[String]) { - new LispUser(LispCaseClasses).run; - new LispUser(LispAny).run; - () - } -} - -//############################################################################ -- cgit v1.2.3 From 66b47e1a8c11196d648ed5a98f934a1c65203a65 Mon Sep 17 00:00:00 2001 From: Pavel Pavlov Date: Mon, 16 Jan 2012 07:57:09 +0700 Subject: a fast, functional PartialFunction implementation runtime.AbstractPartialFunction provides a default implementation for the new-style partial function. In principle this class is only subclassed by compiler-generated partial functions arising from matches. 
Either - the apply method (old-style partialfun) or - the applyOrElse method (current scheme) must be overridden, and the isDefinedAt method implemented. The applyOrElse method implementation is provided to ease the transition from the old scheme, since starr still generates old-style PartialFunctions, but locker's library has the new AbstractPartialFunction. Thus, this implementation is intended as a drop-in replacement for the old partial function, and does not require changes to the compiler. (compiler patches, both for old and new-style pattern matching, follow) - runtime.AbstractPartialFunction is based on PartialFunction.WithDefault Original version of FunctionWithDefault by Odersky (http://article.gmane.org/gmane.comp.lang.scala.internals/4032) - better performance for OrElse#applyOrElse, OrElse#lift, PF.cond - new combinator methods: PF#run, PF#runWith, PF.apply authored by @pavelpavlov, refactored by @adriaanm, review by @paulp --- src/actors/scala/actors/Actor.scala | 8 +- src/actors/scala/actors/Future.scala | 4 +- src/actors/scala/actors/Reactor.scala | 4 +- src/compiler/scala/tools/cmd/FromString.scala | 12 +- .../nsc/interpreter/AbstractOrMissingHandler.scala | 4 +- .../tools/nsc/matching/ParallelMatching.scala | 4 +- .../scala/tools/nsc/transform/UnCurry.scala | 1 + src/library/scala/Function.scala | 7 +- src/library/scala/PartialFunction.scala | 134 ++++++++++++++++----- .../scala/runtime/AbstractPartialFunction.scala | 84 +++++++------ src/library/scala/util/control/Exception.scala | 5 +- test/files/run/lift-and-unlift.scala | 6 +- 12 files changed, 177 insertions(+), 96 deletions(-) (limited to 'test/files') diff --git a/src/actors/scala/actors/Actor.scala b/src/actors/scala/actors/Actor.scala index aab533ae8d..bc9bbc6ef0 100644 --- a/src/actors/scala/actors/Actor.scala +++ b/src/actors/scala/actors/Actor.scala @@ -1,3 +1,5 @@ + + /* __ *\ ** ________ ___ / / ___ Scala API ** ** / __/ __// _ | / / / _ | (c) 2005-2011, LAMP/EPFL ** @@ -246,8 +248,8 @@ object Actor extends Combinators { rawSelf.react(new RecursiveProxyHandler(rawSelf, f)) private class RecursiveProxyHandler(a: InternalReplyReactor, f: PartialFunction[Any, Unit]) - extends scala.runtime.AbstractPartialFunction[Any, Unit] { - def _isDefinedAt(m: Any): Boolean = + extends PartialFunction[Any, Unit] { + def isDefinedAt(m: Any): Boolean = true // events are immediately removed from the mailbox def apply(m: Any) { if (f.isDefinedAt(m)) f(m) @@ -404,5 +406,5 @@ trait Actor extends InternalActor with ReplyReactor { this } -} + } diff --git a/src/actors/scala/actors/Future.scala b/src/actors/scala/actors/Future.scala index eec43013d3..735c13190b 100644 --- a/src/actors/scala/actors/Future.scala +++ b/src/actors/scala/actors/Future.scala @@ -200,8 +200,8 @@ object Futures { Actor.timer.schedule(timerTask, timeout) def awaitWith(partFuns: Seq[PartialFunction[Any, Pair[Int, Any]]]) { - val reaction: PartialFunction[Any, Unit] = new scala.runtime.AbstractPartialFunction[Any, Unit] { - def _isDefinedAt(msg: Any) = msg match { + val reaction: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] { + def isDefinedAt(msg: Any) = msg match { case TIMEOUT => true case _ => partFuns exists (_ isDefinedAt msg) } diff --git a/src/actors/scala/actors/Reactor.scala b/src/actors/scala/actors/Reactor.scala index 8fc7578344..206a97d97c 100644 --- a/src/actors/scala/actors/Reactor.scala +++ b/src/actors/scala/actors/Reactor.scala @@ -38,8 +38,8 @@ private[actors] object Reactor { } } - val waitingForNone: PartialFunction[Any, 
Unit] = new scala.runtime.AbstractPartialFunction[Any, Unit] { - def _isDefinedAt(x: Any) = false + val waitingForNone: PartialFunction[Any, Unit] = new PartialFunction[Any, Unit] { + def isDefinedAt(x: Any) = false def apply(x: Any) {} } } diff --git a/src/compiler/scala/tools/cmd/FromString.scala b/src/compiler/scala/tools/cmd/FromString.scala index e4504702d4..3792c26c34 100644 --- a/src/compiler/scala/tools/cmd/FromString.scala +++ b/src/compiler/scala/tools/cmd/FromString.scala @@ -14,9 +14,9 @@ import scala.reflect.OptManifest * example instances are in the companion object, but in general * either IntFromString will suffice or you'll want custom transformers. */ -abstract class FromString[+T](implicit m: OptManifest[T]) extends scala.runtime.AbstractPartialFunction[String, T] { +abstract class FromString[+T](implicit m: OptManifest[T]) extends PartialFunction[String, T] { def apply(s: String): T - def _isDefinedAt(s: String): Boolean = true + def isDefinedAt(s: String): Boolean = true def zero: T = apply("") def targetString: String = m.toString @@ -30,20 +30,20 @@ object FromString { /** Path related stringifiers. */ val ExistingFile: FromString[File] = new FromString[File] { - override def _isDefinedAt(s: String) = toFile(s).isFile + override def isDefinedAt(s: String) = toFile(s).isFile def apply(s: String): File = if (isDefinedAt(s)) toFile(s) else cmd.runAndExit(println("'%s' is not an existing file." format s)) } val ExistingDir: FromString[Directory] = new FromString[Directory] { - override def _isDefinedAt(s: String) = toDir(s).isDirectory + override def isDefinedAt(s: String) = toDir(s).isDirectory def apply(s: String): Directory = if (isDefinedAt(s)) toDir(s) else cmd.runAndExit(println("'%s' is not an existing directory." format s)) } def ExistingDirRelativeTo(root: Directory) = new FromString[Directory] { private def resolve(s: String) = toDir(s) toAbsoluteWithRoot root toDirectory - override def _isDefinedAt(s: String) = resolve(s).isDirectory + override def isDefinedAt(s: String) = resolve(s).isDirectory def apply(s: String): Directory = if (isDefinedAt(s)) resolve(s) else cmd.runAndExit(println("'%s' is not an existing directory." format resolve(s))) @@ -65,7 +65,7 @@ object FromString { /** Implicit as the most likely to be useful as-is. 
*/ implicit val IntFromString: FromString[Int] = new FromString[Int] { - override def _isDefinedAt(s: String) = safeToInt(s).isDefined + override def isDefinedAt(s: String) = safeToInt(s).isDefined def apply(s: String) = safeToInt(s).get def safeToInt(s: String): Option[Int] = try Some(java.lang.Integer.parseInt(s)) catch { case _: NumberFormatException => None } } diff --git a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala index 33ef4a432d..2f47685757 100644 --- a/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala +++ b/src/compiler/scala/tools/nsc/interpreter/AbstractOrMissingHandler.scala @@ -6,8 +6,8 @@ package scala.tools.nsc package interpreter -class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends scala.runtime.AbstractPartialFunction[Throwable, T] { - def _isDefinedAt(t: Throwable) = t match { +class AbstractOrMissingHandler[T](onError: String => Unit, value: T) extends PartialFunction[Throwable, T] { + def isDefinedAt(t: Throwable) = t match { case _: AbstractMethodError => true case _: NoSuchMethodError => true case _: MissingRequirementError => true diff --git a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala index be5a9907b8..43aad9f591 100644 --- a/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala +++ b/src/compiler/scala/tools/nsc/matching/ParallelMatching.scala @@ -425,7 +425,7 @@ trait ParallelMatching extends ast.TreeDSL // Should the given pattern join the expanded pivot in the success matrix? If so, // this partial function will be defined for the pattern, and the result of the apply // is the expanded sequence of new patterns. - lazy val successMatrixFn = new scala.runtime.AbstractPartialFunction[Pattern, List[Pattern]] { + lazy val successMatrixFn = new PartialFunction[Pattern, List[Pattern]] { private def seqIsDefinedAt(x: SequenceLikePattern) = (hasStar, x.hasStar) match { case (true, true) => true case (true, false) => pivotLen <= x.nonStarLength @@ -433,7 +433,7 @@ trait ParallelMatching extends ast.TreeDSL case (false, false) => pivotLen == x.nonStarLength } - def _isDefinedAt(pat: Pattern) = pat match { + def isDefinedAt(pat: Pattern) = pat match { case x: SequenceLikePattern => seqIsDefinedAt(x) case WildcardPattern() => true case _ => false diff --git a/src/compiler/scala/tools/nsc/transform/UnCurry.scala b/src/compiler/scala/tools/nsc/transform/UnCurry.scala index e54e0289bb..0d39c040f7 100644 --- a/src/compiler/scala/tools/nsc/transform/UnCurry.scala +++ b/src/compiler/scala/tools/nsc/transform/UnCurry.scala @@ -210,6 +210,7 @@ abstract class UnCurry extends InfoTransform * body = expr match { case P_i if G_i => E_i }_i=1..n * to: * + //TODO: correct code template below * class $anon() extends AbstractPartialFunction[T, R] with Serializable { * def apply(x: T): R = (expr: @unchecked) match { * case P_1 if G_1 => E_1 diff --git a/src/library/scala/Function.scala b/src/library/scala/Function.scala index 4a10b65735..9fa56a332f 100644 --- a/src/library/scala/Function.scala +++ b/src/library/scala/Function.scala @@ -29,6 +29,7 @@ object Function { /** Turns a function `A => Option[B]` into a `PartialFunction[A, B]`. 
* + * TODO: check if the paragraph below is still correct * '''Important note''': this transformation implies the original function * will be called 2 or more times on each logical invocation, because the * only way to supply an implementation of `isDefinedAt` is to call the @@ -39,11 +40,7 @@ object Function { * f returns `Some(_)` and undefined where `f` returns `None`. * @see [[scala.PartialFunction#lift]] */ - def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = new runtime.AbstractPartialFunction[T, R] { - def apply(x: T): R = f(x).get - def _isDefinedAt(x: T): Boolean = f(x).isDefined - override def lift: T => Option[R] = f - } + def unlift[T, R](f: T => Option[R]): PartialFunction[T, R] = PartialFunction.unlifted(f) /** Uncurrying for functions of arity 2. This transforms a unary function * returning another unary function into a function of arity 2. diff --git a/src/library/scala/PartialFunction.scala b/src/library/scala/PartialFunction.scala index 3c5d6d0d23..7154b8da34 100644 --- a/src/library/scala/PartialFunction.scala +++ b/src/library/scala/PartialFunction.scala @@ -8,6 +8,7 @@ package scala + /** A partial function of type `PartialFunction[A, B]` is a unary function * where the domain does not necessarily include all values of type `A`. * The function `isDefinedAt` allows to test dynamically if a value is in @@ -43,10 +44,11 @@ package scala * }}} * * - * @author Martin Odersky + * @author Martin Odersky, Pavel Pavlov, Adriaan Moors * @version 1.0, 16/07/2003 */ -trait PartialFunction[-A, +B] extends (A => B) { +trait PartialFunction[-A, +B] extends (A => B) { self => + import PartialFunction._ /** Checks if a value is contained in the function's domain. * @@ -55,10 +57,6 @@ trait PartialFunction[-A, +B] extends (A => B) { */ def isDefinedAt(x: A): Boolean - //protected def missingCase[A1 <: A, B1 >: B]: PartialFunction[A1, B1] = PartialFunction.empty - - protected def missingCase(x: A): B = throw new MatchError(x) - /** Composes this partial function with a fallback partial function which * gets applied where this partial function is not defined. * @@ -70,16 +68,8 @@ trait PartialFunction[-A, +B] extends (A => B) { * takes `x` to `this(x)` where `this` is defined, and to `that(x)` where it is not. */ def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = - new runtime.AbstractPartialFunction[A1, B1] { - def _isDefinedAt(x: A1): Boolean = - PartialFunction.this.isDefinedAt(x) || that.isDefinedAt(x) - def apply(x: A1): B1 = - if (PartialFunction.this.isDefinedAt(x)) PartialFunction.this.apply(x) - else that.apply(x) - } - - def orElseFast[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = - orElse(that) + new OrElse[A1, B1] (this, that) + //TODO: why not overload it with orElse(that: F1): F1? /** Composes this partial function with a transformation function that * gets applied to results of this partial function. @@ -88,9 +78,9 @@ trait PartialFunction[-A, +B] extends (A => B) { * @return a partial function with the same domain as this partial function, which maps * arguments `x` to `k(this(x))`. 
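For readers following the library change rather than the individual hunks: the combinators documented above (`unlift`, `lift`, `orElse`, `andThen`) can be exercised with a short, self-contained sketch against a 2.10-or-later standard library. Everything below uses only public API; the value names are invented for the example.

```scala
import scala.Function.unlift

object PartialFunctionCombinatorsDemo extends App {
  // Option-returning function <-> partial function, as documented for unlift/lift.
  val evens: Int => Option[Int] = x => if (x % 2 == 0) Some(x) else None
  val evensPf: PartialFunction[Int, Int] = unlift(evens)

  assert(evensPf.isDefinedAt(4) && evensPf(4) == 4)
  assert(!evensPf.isDefinedAt(3))
  assert(evensPf.lift(3) == None && evensPf.lift(4) == Some(4))

  // orElse falls through to the second function only where the first is undefined;
  // andThen post-processes results without changing the domain.
  val odds: PartialFunction[Int, Int] = { case x if x % 2 != 0 => -x }
  val both = evensPf orElse odds
  assert(both(4) == 4 && both(3) == -3)

  val doubled = evensPf andThen (_ * 2)
  assert(doubled.isDefinedAt(4) && doubled(4) == 8)
  assert(!doubled.isDefinedAt(3))
}
```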
*/ - override def andThen[C](k: B => C) : PartialFunction[A, C] = new runtime.AbstractPartialFunction[A, C] { - def _isDefinedAt(x: A): Boolean = PartialFunction.this.isDefinedAt(x) - def apply(x: A): C = k(PartialFunction.this.apply(x)) + override def andThen[C](k: B => C) : PartialFunction[A, C] = new PartialFunction[A, C] { + def isDefinedAt(x: A): Boolean = self isDefinedAt x + def apply(x: A): C = k(self(x)) } /** Turns this partial function into an plain function returning an `Option` result. @@ -98,9 +88,30 @@ trait PartialFunction[-A, +B] extends (A => B) { * @return a function that takes an argument `x` to `Some(this(x))` if `this` * is defined for `x`, and to `None` otherwise. */ - def lift: A => Option[B] = new (A => Option[B]) { - def apply(x: A): Option[B] = if (isDefinedAt(x)) Some(PartialFunction.this.apply(x)) else None - } + def lift: A => Option[B] = new Lifted(this) + + /** + * TODO: comment + * @since 2.10 + */ + def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = + if (isDefinedAt(x)) apply(x) else default(x) + + /** + * TODO: comment + * @since 2.10 + */ + def run[U](x: A)(action: B => U): Boolean = + applyOrElse(x, fallbackToken) match { + case FallbackToken => false + case z => action(z); true + } + + /** + * TODO: comment + * @since 2.10 + */ + def runWith[U](action: B => U): A => Boolean = { x => run(x)(action) } } /** A few handy operations which leverage the extra bit of information @@ -119,14 +130,73 @@ trait PartialFunction[-A, +B] extends (A => B) { * @since 2.8 */ object PartialFunction { - private[this] final val empty_pf: PartialFunction[Any, Nothing] = new runtime.AbstractPartialFunction[Any, Nothing] { - def _isDefinedAt(x: Any) = false - override def isDefinedAt(x: Any) = false - def apply(x: Any): Nothing = throw new MatchError(x) - override def orElse[A1, B1](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = that - override def orElseFast[A1, B1](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = that - override def lift = (x: Any) => None + /** Composite function produced by `PartialFunction#orElse` method + */ + private final class OrElse[-A, +B] (f1: PartialFunction[A, B], f2: PartialFunction[A, B]) extends PartialFunction[A, B] { + def isDefinedAt(x: A) = f1.isDefinedAt(x) || f2.isDefinedAt(x) + + def apply(x: A): B = f1.applyOrElse(x, f2) + + override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => B1): B1 = + f1.applyOrElse(x, fallbackToken) match { + case FallbackToken => f2.applyOrElse(x, default) + case z => z + } + + override def orElse[A1 <: A, B1 >: B](that: PartialFunction[A1, B1]) = + new OrElse[A1, B1] (f1, f2 orElse that) + + override def andThen[C](k: B => C) = + new OrElse[A, C] (f1 andThen k, f2 andThen k) } + + private[scala] lazy val FallbackToken: PartialFunction[Any, PartialFunction[Any, Nothing]] = { case _ => FallbackToken.asInstanceOf[PartialFunction[Any, Nothing]] } + private[scala] final def fallbackToken[B] = FallbackToken.asInstanceOf[PartialFunction[Any, B]] + //TODO: check generated code for PF literal here + + private[scala] final class Lifted[-A, +B] (val pf: PartialFunction[A, B]) + extends runtime.AbstractFunction1[A, Option[B]] { + + def apply(x: A): Option[B] = pf.applyOrElse(x, fallbackToken) match { + case FallbackToken => None + case z => Some(z) + } + } + + private final class Unlifted[A, B] (f: A => Option[B]) extends runtime.AbstractPartialFunction[A, B] { + def isDefinedAt(x: A): Boolean = f(x).isDefined + override def applyOrElse[A1 <: A, B1 >: B](x: A1, default: A1 => 
B1): B1 = + f(x) getOrElse default(x) //TODO: check generated code and inline getOrElse if needed + override def lift = f + } + + private[scala] def unlifted[A, B](f: A => Option[B]): PartialFunction[A, B] = f match { + case lf: Lifted[A, B] => lf.pf + case ff => new Unlifted(ff) + } + + /** Converts ordinary function to partial one + * @since 2.10 + */ + //TODO: check generated code for PF literal here + def apply[A, B](f: A => B): PartialFunction[A, B] = { case x => f(x) } + + private[this] final val constFalse: Any => Boolean = { _ => false} + + private[this] final val empty_pf: PartialFunction[Any, Nothing] = new PartialFunction[Any, Nothing] { + def isDefinedAt(x: Any) = false + def apply(x: Any) = throw new MatchError(x) + override def orElse[A1, B1](that: PartialFunction[A1, B1]) = that + override def andThen[C](k: Nothing => C) = this + override val lift = (x: Any) => None + override def run[U](x: Any)(action: Nothing => U) = false + override def runWith[U](action: Nothing => U) = constFalse + } + + /** + * TODO: comment + * @since 2.10 + */ def empty[A, B] : PartialFunction[A, B] = empty_pf /** Creates a Boolean test based on a value and a partial function. @@ -137,8 +207,7 @@ object PartialFunction { * @param pf the partial function * @return true, iff `x` is in the domain of `pf` and `pf(x) == true`. */ - def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = - (pf isDefinedAt x) && pf(x) + def cond[T](x: T)(pf: PartialFunction[T, Boolean]): Boolean = pf.applyOrElse(x, constFalse) /** Transforms a PartialFunction[T, U] `pf` into Function1[T, Option[U]] `f` * whose result is `Some(x)` if the argument is in `pf`'s domain and `None` @@ -150,6 +219,5 @@ object PartialFunction { * @param pf the PartialFunction[T, U] * @return `Some(pf(x))` if `pf isDefinedAt x`, `None` otherwise. */ - def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = - if (pf isDefinedAt x) Some(pf(x)) else None + def condOpt[T,U](x: T)(pf: PartialFunction[T, U]): Option[U] = pf.lift(x) } diff --git a/src/library/scala/runtime/AbstractPartialFunction.scala b/src/library/scala/runtime/AbstractPartialFunction.scala index cbe778f09b..2e435d8a7e 100644 --- a/src/library/scala/runtime/AbstractPartialFunction.scala +++ b/src/library/scala/runtime/AbstractPartialFunction.scala @@ -8,45 +8,61 @@ package scala.runtime -import scala.annotation.unchecked.uncheckedVariance - -/** This class provides a default implementation of partial functions - * that is used for all partial function literals. - * It contains an optimized `orElse` method which supports - * chained `orElse` in linear time, and with no slow-down - * if the `orElse` part is not needed. - * The implementation of `orElse` works by cloning the abstract function object - * and modifying a private `fallBack` variable that encodes the `getorElse` part. +/** `AbstractPartialFunction` reformulates all operations of its supertrait `PartialFunction` in terms of `isDefinedAt` and `applyOrElse`. + * + * This allows more efficient implementations in many cases: + * - optimized `orElse` method supports chained `orElse` in linear time, + * and with no slow-down if the `orElse` part is not needed. + * - optimized `lift` method helps to avoid double evaluation of pattern matchers & guards + * of partial function literals. + * + * This trait is used as a basis for implementation of all partial function literals + * with non-exhaustive matchers. 
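To make the `applyOrElse` contract concrete, here is a hand-written analogue of the kind of subclass the commit message describes the compiler emitting for a non-exhaustive match. It is a sketch against `scala.runtime.AbstractPartialFunction` as introduced here (override `applyOrElse`, implement `isDefinedAt`), not the literal generated code; the class and value names are invented for the example.

```scala
import scala.runtime.AbstractPartialFunction

// Roughly what `{ case s: String => s.length }` becomes under the applyOrElse scheme.
class StringLength extends AbstractPartialFunction[Any, Int] {
  override def applyOrElse[A1 <: Any, B1 >: Int](x: A1, default: A1 => B1): B1 = x match {
    case s: String => s.length
    case _         => default(x)
  }
  def isDefinedAt(x: Any): Boolean = x match {
    case _: String => true
    case _         => false
  }
}

object StringLengthDemo extends App {
  val pf: PartialFunction[Any, Int] = new StringLength

  assert(pf("abc") == 3)
  assert(!pf.isDefinedAt(42))
  assert(pf.lift(42) == None)                         // lift goes through applyOrElse, one evaluation
  assert(pf.applyOrElse(42, (_: Any) => -1) == -1)    // the default is used, no MatchError

  // runWith and cond, two of the combinators touched by this commit:
  val lengths = List.newBuilder[Int]
  List("a", "bb", 3) foreach pf.runWith(lengths += _)
  assert(lengths.result() == List(1, 2))
  assert(PartialFunction.cond(42) { case n if n > 0 => true })
  assert(!PartialFunction.cond(-1) { case n if n > 0 => true })
}
```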
+ * + * Use of `AbstractPartialFunction` instead of `PartialFunction` as a base trait for + * user-defined partial functions may result in better performance + * and more predictable behavior w.r.t. side effects. + * + * @author Pavel Pavlov + * @since 2.10 */ -abstract class AbstractPartialFunction[-T1, +R] - extends AbstractFunction1[T1, R] - with PartialFunction[T1, R] - with Cloneable { - - private var fallBackField: PartialFunction[T1 @uncheckedVariance, R @uncheckedVariance] = _ +abstract class AbstractPartialFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] { self => + // this method must be overridden for better performance, + // for backwards compatibility, fall back to the one inherited from PartialFunction + // this assumes the old-school partial functions override the apply method, though + // override def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = ??? - def fallBack: PartialFunction[T1, R] = synchronized { - if (fallBackField eq null) fallBackField = PartialFunction.empty - fallBackField - } + // probably okay to make final since classes compiled before have overridden against the old version of AbstractPartialFunction + // let's not make it final so as not to confuse anyone + /*final*/ def apply(x: T1): R = applyOrElse(x, PartialFunction.empty) - override protected def missingCase(x: T1): R = fallBack(x) - - // Question: Need to ensure that fallBack is overwritten before any access - // Is the `synchronized` here the right thing to achieve this? - // Is there a cheaper way? - override def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]) : PartialFunction[A1, B1] = { - val result = this.clone.asInstanceOf[AbstractPartialFunction[A1, B1]] - result.synchronized { - result.fallBackField = if (this.fallBackField eq null) that else this.fallBackField orElse that - result + override final def andThen[C](k: R => C) : PartialFunction[T1, C] = + new AbstractPartialFunction[T1, C] { + def isDefinedAt(x: T1): Boolean = self.isDefinedAt(x) + override def applyOrElse[A1 <: T1, C1 >: C](x: A1, default: A1 => C1): C1 = + self.applyOrElse(x, PartialFunction.fallbackToken) match { + case PartialFunction.FallbackToken => default(x) + case z => k(z) + } } - } - - def isDefinedAt(x: T1): scala.Boolean = _isDefinedAt(x) || fallBack.isDefinedAt(x) - def _isDefinedAt(x: T1): scala.Boolean + // TODO: remove + protected def missingCase(x: T1): R = throw new MatchError(x) } - +/** `AbstractTotalFunction` is a partial function whose `isDefinedAt` method always returns `true`. + * + * This class is used as base class for partial function literals with + * certainly exhaustive pattern matchers. 
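The split into a base class for non-exhaustive matchers and one for exhaustive matchers mirrors a difference that is already visible from ordinary user code. A small sketch, using only the public `PartialFunction` API (nothing from `scala.runtime`):

```scala
object TotalVsPartialDemo extends App {
  val total: PartialFunction[Int, Int]   = { case n => n + 1 }       // exhaustive matcher
  val partial: PartialFunction[Int, Int] = { case n if n > 0 => n }  // non-exhaustive matcher

  assert(total.isDefinedAt(Int.MinValue))                  // always defined, never a MatchError
  assert(!partial.isDefinedAt(0))
  assert((partial orElse total)(0) == 1)                   // falls through exactly one level
  assert(partial.applyOrElse(0, (n: Int) => n - 1) == -1)  // default applied where undefined
}
```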
+ * + * @author Pavel Pavlov + * @since 2.10 + */ +abstract class AbstractTotalFunction[@specialized(scala.Int, scala.Long, scala.Float, scala.Double, scala.AnyRef) -T1, @specialized(scala.Unit, scala.Boolean, scala.Int, scala.Float, scala.Long, scala.Double, scala.AnyRef) +R] extends Function1[T1, R] with PartialFunction[T1, R] { + final def isDefinedAt(x: T1): Boolean = true + override final def applyOrElse[A1 <: T1, B1 >: R](x: A1, default: A1 => B1): B1 = apply(x) + override final def orElse[A1 <: T1, B1 >: R](that: PartialFunction[A1, B1]): PartialFunction[A1, B1] = this + //TODO: check generated code for PF literal here + override final def andThen[C](k: R => C): PartialFunction[T1, C] = { case x => k(apply(x)) } +} diff --git a/src/library/scala/util/control/Exception.scala b/src/library/scala/util/control/Exception.scala index 5e3f8b6451..20a179a884 100644 --- a/src/library/scala/util/control/Exception.scala +++ b/src/library/scala/util/control/Exception.scala @@ -230,8 +230,5 @@ object Exception { classes exists (_ isAssignableFrom x.getClass) private def pfFromExceptions(exceptions: Class[_]*): PartialFunction[Throwable, Nothing] = - new scala.runtime.AbstractPartialFunction[Throwable, Nothing] { - def apply(x: Throwable) = throw x - def _isDefinedAt(x: Throwable) = wouldMatch(x, exceptions) - } + { case x if wouldMatch(x, exceptions) => throw x } } diff --git a/test/files/run/lift-and-unlift.scala b/test/files/run/lift-and-unlift.scala index b944c70155..a4a5d9502e 100644 --- a/test/files/run/lift-and-unlift.scala +++ b/test/files/run/lift-and-unlift.scala @@ -2,7 +2,7 @@ import Function.unlift object Test { def evens1(x: Int) = if (x % 2 == 0) Some(x) else None - def evens2: PartialFunction[Int, Int] = { + val evens2: PartialFunction[Int, Int] = { case x if x % 2 == 0 => x } @@ -21,7 +21,7 @@ object Test { }) assert(f1 eq f3.lift) - // Hmm, why is this not true: - // assert(f2 eq f4.lift) + assert(f4 eq unlift(f2)) + assert(f4 eq evens2) } } -- cgit v1.2.3 From 75e584bd0c056a39ea6ef52848e8c2cbe764cb3a Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Fri, 30 Mar 2012 11:42:40 -0700 Subject: Fix for regression with inference at arity 21+. A classic "off by two" error. Closes SI-4545, SI-5633. 
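Before the diff itself, a minimal model of the corrected predicate. `Type`, `FunctionClass`, and the real `isFunctionType` are compiler internals, so plain integers stand in for the type-argument list here; the only assumption taken from the library is that `FunctionN` exists for N = 0..22.

```scala
object ArityCheckModel extends App {
  val MaxFunctionArity = 22

  // A FunctionN type reference carries N + 1 type arguments (N parameters plus the result).
  def oldCheck(numTypeArgs: Int) = numTypeArgs < MaxFunctionArity          // compared the raw count
  def newCheck(numTypeArgs: Int) = (numTypeArgs - 1) <= MaxFunctionArity   // compares the arity

  assert(oldCheck(21) && newCheck(21))    // arity 20 and below: both versions agree
  assert(!oldCheck(22) && newCheck(22))   // arity 21 was wrongly rejected ("off by two")
  assert(!oldCheck(23) && newCheck(23))   // ...and so was arity 22
}
```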
--- src/compiler/scala/reflect/internal/Definitions.scala | 4 ++-- test/files/pos/t4545.scala | 14 ++++++++++++++ 2 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 test/files/pos/t4545.scala (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index a2dd6fc4c3..09c2228b01 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -601,8 +601,8 @@ trait Definitions extends reflect.api.StandardDefinitions { def isFunctionType(tp: Type): Boolean = tp.normalize match { case TypeRef(_, sym, args) if args.nonEmpty => - val len = args.length - len < MaxFunctionArity && sym == FunctionClass(len - 1) + val arity = args.length - 1 // -1 is the return type + arity <= MaxFunctionArity && sym == FunctionClass(arity) case _ => false } diff --git a/test/files/pos/t4545.scala b/test/files/pos/t4545.scala new file mode 100644 index 0000000000..8c7a3236c4 --- /dev/null +++ b/test/files/pos/t4545.scala @@ -0,0 +1,14 @@ +object Test { + def f[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T](table: Tuple20[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T) => Unit) { + } + def g[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U](table: Tuple21[A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U])(fun: (A, B, C, D, E, F, G, H, I, J, K, L, M, N, O, P, Q, R, S, T, U) => Unit) { + } + + def g20 = f( + ( 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) + ) { case ((a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t)) => () } + + def g21 = g( + (1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1) + ) { case ((a, b, c, d, e, f, g, h, i, j, k, l, m, n, o, p, q, r, s, t, u)) => () } +} -- cgit v1.2.3 From f7535f72903f083b2444fb1d0b73363efa5482e9 Mon Sep 17 00:00:00 2001 From: Paul Phillips Date: Sat, 31 Mar 2012 12:52:54 -0700 Subject: Pushed Symbol/Type creation partitioning further. Yet more funnelling of immutable creation-time known information into the identities of symbols and types. --- .../scala/reflect/internal/Definitions.scala | 66 ++++++++++---- src/compiler/scala/reflect/internal/Symbols.scala | 32 ++++--- src/compiler/scala/reflect/internal/Types.scala | 101 +++++++++++++-------- .../scala/tools/nsc/symtab/BrowsingLoaders.scala | 2 +- .../scala/tools/nsc/symtab/SymbolLoaders.scala | 8 +- .../nsc/symtab/classfile/ClassfileParser.scala | 5 +- .../tools/nsc/typechecker/ContextErrors.scala | 4 +- .../scala/tools/nsc/typechecker/RefChecks.scala | 1 + .../scala/tools/nsc/typechecker/Typers.scala | 9 +- test/files/neg/override-object-flag.check | 2 +- test/files/neg/override-object-no.check | 2 +- test/files/neg/t961.check | 2 +- test/files/presentation/callcc-interpreter.check | 2 +- 13 files changed, 148 insertions(+), 88 deletions(-) (limited to 'test/files') diff --git a/src/compiler/scala/reflect/internal/Definitions.scala b/src/compiler/scala/reflect/internal/Definitions.scala index 0188fb3944..8ea3cd511a 100644 --- a/src/compiler/scala/reflect/internal/Definitions.scala +++ b/src/compiler/scala/reflect/internal/Definitions.scala @@ -138,26 +138,47 @@ trait Definitions extends reflect.api.StandardDefinitions { // symbols related to packages var emptypackagescope: Scope = null //debug - // This is the package _root_. 
The actual root cannot be referenced at - // the source level, but _root_ is essentially a function () => . - lazy val RootPackage: Symbol = { - val rp = ( - NoSymbol.newValue(nme.ROOTPKG, NoPosition, FINAL | MODULE | PACKAGE | JAVA) - setInfo NullaryMethodType(RootClass.tpe) - ) - RootClass.sourceModule = rp - rp + sealed trait WellKnownSymbol extends Symbol { + this initFlags TopLevelCreationFlags + } + // Features common to RootClass and RootPackage, the roots of all + // type and term symbols respectively. + sealed trait RootSymbol extends WellKnownSymbol { + final override def isRootSymbol = true } + // This is the package _root_. The actual root cannot be referenced at + // the source level, but _root_ is essentially a function => . + final object RootPackage extends ModuleSymbol(NoSymbol, NoPosition, nme.ROOTPKG) with RootSymbol { + this setInfo NullaryMethodType(RootClass.tpe) + RootClass.sourceModule = this - // This is the actual root of everything, including the package _root_. - lazy val RootClass: ModuleClassSymbol = ( - NoSymbol.newModuleClassSymbol(tpnme.ROOT, NoPosition, FINAL | MODULE | PACKAGE | JAVA) - setInfo rootLoader - ) + override def isRootPackage = true + } + // This is , the actual root of everything except the package _root_. + // and _root_ (RootPackage and RootClass) should be the only "well known" + // symbols owned by NoSymbol. All owner chains should go through RootClass, + // although it is probable that some symbols are created as direct children + // of NoSymbol to ensure they will not be stumbled upon. (We should designate + // a better encapsulated place for that.) + final object RootClass extends ModuleClassSymbol(NoSymbol, NoPosition, tpnme.ROOT) with RootSymbol { + this setInfo rootLoader + + override def isRoot = true + override def isEffectiveRoot = true + override def isStatic = true + override def isNestedClass = false + override def ownerOfNewSymbols = EmptyPackageClass + } // The empty package, which holds all top level types without given packages. - lazy val EmptyPackage = RootClass.newPackage(nme.EMPTY_PACKAGE_NAME, NoPosition, FINAL) - lazy val EmptyPackageClass = EmptyPackage.moduleClass - + final object EmptyPackage extends ModuleSymbol(RootClass, NoPosition, nme.EMPTY_PACKAGE_NAME) with WellKnownSymbol { + override def isEmptyPackage = true + } + final object EmptyPackageClass extends ModuleClassSymbol(RootClass, NoPosition, tpnme.EMPTY_PACKAGE_NAME) with WellKnownSymbol { + override def isEffectiveRoot = true + override def isEmptyPackageClass = true + } + // It becomes tricky to create dedicated objects for other symbols because + // of initialization order issues. lazy val JavaLangPackage = getModule(sn.JavaLang) lazy val JavaLangPackageClass = JavaLangPackage.moduleClass lazy val ScalaPackage = getModule(nme.scala_) @@ -542,6 +563,12 @@ trait Definitions extends reflect.api.StandardDefinitions { // Checks whether the given type is true for the given condition, // or if it is a specialized subtype of a type for which it is true. + // + // Origins notes: + // An issue was introduced with specialization in that the implementation + // of "isTupleType" in Definitions relied upon sym == TupleClass(elems.length). + // This test is untrue for specialized tuples, causing mysterious behavior + // because only some tuples are specialized. 
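The origins note above concerns symbol identity inside the specialization phase, which cannot be reproduced outside the compiler. The closest runnable illustration of "only some tuples are specialized" is the runtime class split produced by the same machinery; with a 2.x compiler the first line below prints a specialized subclass name such as `scala.Tuple2$mcII$sp` (treat the exact name as an implementation detail).

```scala
object SpecializedTupleDemo extends App {
  val intPair = (1, 2)        // built from a specialized Tuple2 subclass
  val refPair = ("a", "b")    // built from the generic scala.Tuple2

  println(intPair.getClass.getName)
  println(refPair.getClass.getName)

  // Whatever the concrete class, both values remain Tuple2 instances; the compile-time
  // analogue is that a specialized symbol still stands for "a tuple", which is what the
  // predicate discussed above has to account for.
  assert(classOf[Tuple2[_, _]].isInstance(intPair))
  assert(classOf[Tuple2[_, _]].isInstance(refPair))
}
```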
def isPossiblySpecializedType(tp: Type)(cond: Type => Boolean) = { cond(tp) || (tp match { case TypeRef(pre, sym, args) if sym hasFlag SPECIALIZED => @@ -1112,9 +1139,14 @@ trait Definitions extends reflect.api.StandardDefinitions { def init() { if (isInitialized) return + // Still fiddling with whether it's cleaner to do some of this setup here + // or from constructors. The latter approach tends to invite init order issues. EmptyPackageClass setInfo ClassInfoType(Nil, newPackageScope(EmptyPackageClass), EmptyPackageClass) EmptyPackage setInfo EmptyPackageClass.tpe + connectModuleToClass(EmptyPackage, EmptyPackageClass) + connectModuleToClass(RootPackage, RootClass) + RootClass.info.decls enter EmptyPackage RootClass.info.decls enter RootPackage diff --git a/src/compiler/scala/reflect/internal/Symbols.scala b/src/compiler/scala/reflect/internal/Symbols.scala index 2019b92836..4473d63f5f 100644 --- a/src/compiler/scala/reflect/internal/Symbols.scala +++ b/src/compiler/scala/reflect/internal/Symbols.scala @@ -451,12 +451,23 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isVarargsMethod = isMethod && hasFlag(VARARGS) /** Package tests */ - final def isEmptyPackage = isPackage && name == nme.EMPTY_PACKAGE_NAME - final def isEmptyPackageClass = isPackageClass && name == tpnme.EMPTY_PACKAGE_NAME final def isPackage = isModule && hasFlag(PACKAGE) final def isPackageClass = isClass && hasFlag(PACKAGE) - final def isRoot = isPackageClass && owner == NoSymbol - final def isRootPackage = isPackage && owner == NoSymbol + + /** Overridden in custom objects in Definitions */ + def isRoot = false + def isRootPackage = false + def isRootSymbol = false // RootPackage and RootClass. TODO: also NoSymbol. + def isEmptyPackage = false + def isEmptyPackageClass = false + + /** Is this symbol an effective root for fullname string? + */ + def isEffectiveRoot = false + + /** For RootClass, EmptyPackageClass. For all other symbols, itself. + */ + def ownerOfNewSymbols = this /** Does this symbol denote a wrapper created by the repl? */ final def isInterpreterWrapper = ( @@ -464,9 +475,6 @@ trait Symbols extends api.Symbols { self: SymbolTable => && owner.isPackageClass && nme.isReplWrapperName(name) ) - /** Is this symbol an effective root for fullname string? - */ - def isEffectiveRoot = isRoot || isEmptyPackageClass /** Term symbols with the exception of static parts of Java classes and packages. */ @@ -652,8 +660,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isModuleVar = hasFlag(MODULEVAR) /** Is this symbol static (i.e. with no outer instance)? */ - final def isStatic: Boolean = - hasFlag(STATIC) || isRoot || owner.isStaticOwner + def isStatic = (this hasFlag STATIC) || owner.isStaticOwner /** Is this symbol a static constructor? */ final def isStaticConstructor: Boolean = @@ -685,8 +692,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => final def isConstant: Boolean = isStable && isConstantType(tpe.resultType) /** Is this class nested in another class or module (not a package)? */ - final def isNestedClass: Boolean = - isClass && !isRoot && !owner.isPackageClass + def isNestedClass = isClass && !owner.isPackageClass /** Is this class locally defined? 
* A class is local, if @@ -2045,7 +2051,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => def fullNameString: String = { def recur(sym: Symbol): String = { - if (sym.isRoot || sym.isRootPackage || sym == NoSymbol) sym.nameString + if (sym.isRootSymbol || sym == NoSymbol) sym.nameString else if (sym.owner.isEffectiveRoot) sym.nameString else recur(sym.effectiveOwner.enclClass) + "." + sym.nameString } @@ -2095,7 +2101,7 @@ trait Symbols extends api.Symbols { self: SymbolTable => case rt => " <: " + rt } ) - else if (isModule) moduleClass.infoString(tp) + else if (isModule) "" // avoid "object X of type X.type" else tp match { case PolyType(tparams, res) => typeParamsString(tp) + infoString(res) case NullaryMethodType(res) => infoString(res) diff --git a/src/compiler/scala/reflect/internal/Types.scala b/src/compiler/scala/reflect/internal/Types.scala index e062e875cd..5afa5343ed 100644 --- a/src/compiler/scala/reflect/internal/Types.scala +++ b/src/compiler/scala/reflect/internal/Types.scala @@ -904,6 +904,12 @@ trait Types extends api.Types { self: SymbolTable => else str } + /** The string representation of this type when the direct object in a sentence. + * Normally this is no different from the regular representation, but modules + * read better as "object Foo" here and "Foo.type" the rest of the time. + */ + def directObjectString = safeToString + /** A test whether a type contains any unification type variables. */ def isGround: Boolean = this match { case TypeVar(_, constr) => @@ -1224,8 +1230,7 @@ trait Types extends api.Types { self: SymbolTable => else if (sym.isModuleClass) sym.fullNameString + "." else sym.nameString + ".this." override def safeToString: String = - if (sym.isRoot) "" - else if (sym.isEmptyPackageClass) "" + if (sym.isEffectiveRoot) "" + sym.name else super.safeToString override def narrow: Type = this override def kind = "ThisType" @@ -1851,6 +1856,35 @@ trait Types extends api.Types { self: SymbolTable => // advantage to call TypeRef directly. override def typeConstructor = TypeRef(pre, sym, Nil) } + + class ModuleTypeRef(pre0: Type, sym0: Symbol) extends NoArgsTypeRef(pre0, sym0) with ClassTypeRef { + require(sym.isModuleClass, sym) + private[this] var narrowedCache: Type = _ + override def isStable = true + override def narrow = { + if (narrowedCache eq null) + narrowedCache = singleType(pre, sym.sourceModule) + + narrowedCache + } + final override def isNotNull = true + override protected def finishPrefix(rest: String) = objectPrefix + rest + override def directObjectString = super.safeToString + override def toLongString = toString + override def safeToString = narrow.toString + } + class PackageTypeRef(pre0: Type, sym0: Symbol) extends ModuleTypeRef(pre0, sym0) { + require(sym.isPackageClass, sym) + override protected def finishPrefix(rest: String) = packagePrefix + rest + } + class RefinementTypeRef(sym0: Symbol) extends NoArgsTypeRef(NoType, sym0) with ClassTypeRef { + require(sym.isRefinementClass, sym) + + // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers + override protected def normalizeImpl: Type = sym.info.normalize + override protected def finishPrefix(rest: String) = "" + thisInfo + } + class NoArgsTypeRef(pre0: Type, sym0: Symbol) extends TypeRef(pre0, sym0, Nil) with UniqueType { // A reference (in a Scala program) to a type that has type parameters, but where the reference // does not include type arguments. 
     // Note that it doesn't matter whether the symbol refers
@@ -1898,10 +1932,6 @@ trait Types extends api.Types { self: SymbolTable =>
     // !!! There are scaladoc-created symbols arriving which violate this require.
     // require(sym.isClass, sym)
 
-    override protected def normalizeImpl: Type =
-      if (sym.isRefinementClass) sym.info.normalize // I think this is okay, but see #1241 (r12414), #2208, and typedTypeConstructor in Typers
-      else super.normalizeImpl
-
     override def baseType(clazz: Symbol): Type =
       if (sym == clazz) this
       else transform(sym.info.baseType(clazz))
@@ -2147,12 +2177,15 @@ trait Types extends api.Types { self: SymbolTable =>
       }
     }
 
-    private def preString = (
-      // ensure that symbol is not a local copy with a name coincidence
-      if (!settings.debug.value && shorthands(sym.fullName) && sym.ownerChain.forall(_.isClass)) ""
-      else pre.prefixString
+    // ensure that symbol is not a local copy with a name coincidence
+    private def needsPreString = (
+         settings.debug.value
+      || !shorthands(sym.fullName)
+      || sym.ownerChain.exists(s => !s.isClass)
     )
+    private def preString = if (needsPreString) pre.prefixString else ""
     private def argsString = if (args.isEmpty) "" else args.mkString("[", ",", "]")
+
     def refinementString = (
       if (sym.isStructuralRefinement) (
         decls filter (sym => sym.isPossibleInRefinement && sym.isPublic)
@@ -2162,12 +2195,9 @@ trait Types extends api.Types { self: SymbolTable =>
       else ""
     )
 
-    private def finishPrefix(rest: String) = (
-      if (sym.isPackageClass) packagePrefix + rest
-      else if (sym.isModuleClass) objectPrefix + rest
-      else if (!sym.isInitialized) rest
-      else if (sym.isAnonymousClass && !phase.erasedTypes) parentsString(thisInfo.parents) + refinementString
-      else if (sym.isRefinementClass) "" + thisInfo
+    protected def finishPrefix(rest: String) = (
+      if (sym.isInitialized && sym.isAnonymousClass && !phase.erasedTypes)
+        parentsString(thisInfo.parents) + refinementString
       else rest
     )
     private def customToString = this match {
@@ -2227,6 +2257,9 @@ trait Types extends api.Types { self: SymbolTable =>
       else {
         if (sym.isAliasType) new NoArgsTypeRef(pre, sym) with AliasTypeRef
         else if (sym.isAbstractType) new NoArgsTypeRef(pre, sym) with AbstractTypeRef
+        else if (sym.isRefinementClass) new RefinementTypeRef(sym)
+        else if (sym.isPackageClass) new PackageTypeRef(pre, sym)
+        else if (sym.isModuleClass) new ModuleTypeRef(pre, sym)
         else new NoArgsTypeRef(pre, sym) with ClassTypeRef
       }
     })
@@ -4603,31 +4636,22 @@ trait Types extends api.Types { self: SymbolTable =>
   object adaptToNewRunMap extends TypeMap {
     private def adaptToNewRun(pre: Type, sym: Symbol): Symbol = {
-      if (phase.flatClasses) {
+      if (phase.flatClasses || sym.isRootSymbol || (pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass)
         sym
-      } else if (sym == definitions.RootClass) {
-        definitions.RootClass
-      } else if (sym == definitions.RootPackage) {
-        definitions.RootPackage
-      } else if (sym.isModuleClass) {
+      else if (sym.isModuleClass) {
         val sourceModule1 = adaptToNewRun(pre, sym.sourceModule)
-        var result = sourceModule1.moduleClass
-        if (result == NoSymbol) result = sourceModule1.initialize.moduleClass
-        if (result != NoSymbol) result
-        else {
+
+        sourceModule1.moduleClass orElse sourceModule1.initialize.moduleClass orElse {
           val msg = "Cannot adapt module class; sym = %s, sourceModule = %s, sourceModule.moduleClass = %s => sourceModule1 = %s, sourceModule1.moduleClass = %s"
           debuglog(msg.format(sym, sym.sourceModule, sym.sourceModule.moduleClass, sourceModule1, sourceModule1.moduleClass))
           sym
         }
-      } else if ((pre eq NoPrefix) || (pre eq NoType) || sym.isPackageClass) {
-        sym
-      } else {
-        var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true)
-        if (rebind0 == NoSymbol) {
+      }
+      else {
+        var rebind0 = pre.findMember(sym.name, BRIDGE, 0, true) orElse {
           if (sym.isAliasType) throw missingAliasException
           debugwarn(pre+"."+sym+" does no longer exist, phase = "+phase)
           throw new MissingTypeControl // For build manager and presentation compiler purposes
-          //assert(false, pre+"."+sym+" does no longer exist, phase = "+phase)
         }
 
         /** The two symbols have the same fully qualified name */
         def corresponds(sym1: Symbol, sym2: Symbol): Boolean =
@@ -4646,12 +4670,10 @@ trait Types extends api.Types { self: SymbolTable =>
             ", rebind = " + rebind0.fullLocationString
           )
         }
-        val rebind = rebind0.suchThat(sym => sym.isType || sym.isStable)
-        if (rebind == NoSymbol) {
+        rebind0.suchThat(sym => sym.isType || sym.isStable) orElse {
          debuglog("" + phase + " " +phase.flatClasses+sym.owner+sym.name+" "+sym.isType)
          throw new MalformedType(pre, sym.nameString)
        }
-        rebind
      }
    }
    def apply(tp: Type): Type = tp match {
@@ -5472,9 +5494,14 @@ trait Types extends api.Types { self: SymbolTable =>
           case _: ClassSymbol =>
             if (isRaw(sym1, tr1.args))
               isSubType(rawToExistential(tp1), tp2, depth)
-            else
-              sym1.name == tpnme.REFINE_CLASS_NAME &&
+            else if (sym1.isModuleClass) tp2 match {
+              case SingleType(_, sym2) => sym1 == sym2
+              case _                   => false
+            }
+            else if (sym1.isRefinementClass)
               isSubType(sym1.info, tp2, depth)
+            else false
+
           case _: TypeSymbol =>
             if (sym1 hasFlag DEFERRED) {
               val tp1a = tp1.bounds.hi
diff --git a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
index 59342a36ef..5f7deb87bd 100644
--- a/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/BrowsingLoaders.scala
@@ -105,7 +105,7 @@ abstract class BrowsingLoaders extends SymbolLoaders {
    */
   override def enterToplevelsFromSource(root: Symbol, name: String, src: AbstractFile) {
     try {
-      if (root == definitions.RootClass || root == definitions.EmptyPackageClass)
+      if (root.isEffectiveRoot) // RootClass or EmptyPackageClass
         super.enterToplevelsFromSource(root, name, src)
       else
         browseTopLevel(root, src)
diff --git a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
index f9ff147e82..7eb04eaf40 100644
--- a/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
+++ b/src/compiler/scala/tools/nsc/symtab/SymbolLoaders.scala
@@ -30,15 +30,11 @@ abstract class SymbolLoaders {
     member
   }
 
-  private def realOwner(root: Symbol): Symbol = {
-    if (root.isRoot) definitions.EmptyPackageClass else root
-  }
-
   /** Enter class with given `name` into scope of `root`
    *  and give them `completer` as type.
    */
   def enterClass(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
-    val owner = realOwner(root)
+    val owner = root.ownerOfNewSymbols
     val clazz = owner.newClass(newTypeName(name))
     clazz setInfo completer
     enterIfNew(owner, clazz, completer)
@@ -48,7 +44,7 @@ abstract class SymbolLoaders {
    *  and give them `completer` as type.
    */
   def enterModule(root: Symbol, name: String, completer: SymbolLoader): Symbol = {
-    val owner = realOwner(root)
+    val owner = root.ownerOfNewSymbols
     val module = owner.newModule(newTermName(name))
     module setInfo completer
     module.moduleClass setInfo moduleClassLoader
diff --git a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
index 61668b1a8a..9dee441527 100644
--- a/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
+++ b/src/compiler/scala/tools/nsc/symtab/classfile/ClassfileParser.scala
@@ -433,10 +433,7 @@ abstract class ClassfileParser {
             sym.info.decl(part.encode)
           }//.suchThat(module == _.isModule)
-          sym = (
-            if (sym1 ne NoSymbol) sym1
-            else sym.info.decl(part.encode.toTypeName)
-          )
+          sym = sym1 orElse sym.info.decl(part.encode.toTypeName)
         }
       }
     sym
diff --git a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
index 764823d786..49ddb985dc 100644
--- a/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/ContextErrors.scala
@@ -317,7 +317,7 @@ trait ContextErrors {
         }
         withAddendum(qual.pos)(
           if (name == nme.CONSTRUCTOR) target + " does not have a constructor"
-          else nameString + " is not a member of " + targetKindString + target + addendum
+          else nameString + " is not a member of " + targetKindString + target.directObjectString + addendum
         )
       }
       issueNormalTypeError(sel, errMsg)
@@ -677,7 +677,7 @@ trait ContextErrors {
     def AccessError(tree: Tree, sym: Symbol, pre: Type, owner0: Symbol, explanation: String) = {
       def errMsg = {
-        val location = if (sym.isClassConstructor) owner0 else pre.widen
+        val location = if (sym.isClassConstructor) owner0 else pre.widen.directObjectString
 
         underlyingSymbol(sym).fullLocationString + " cannot be accessed in " +
         location + explanation
diff --git a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
index 9177aca656..045614e773 100644
--- a/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/RefChecks.scala
@@ -267,6 +267,7 @@ abstract class RefChecks extends InfoTransform with reflect.internal.transform.R
           sym1.locationString +
           (if (sym1.isAliasType) ", which equals "+self.memberInfo(sym1)
            else if (sym1.isAbstractType) " with bounds"+self.memberInfo(sym1)
+           else if (sym1.isModule) ""
            else if (sym1.isTerm) " of type "+self.memberInfo(sym1)
            else "")
          else "")
diff --git a/src/compiler/scala/tools/nsc/typechecker/Typers.scala b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
index ae184d2677..2aff00f6a5 100644
--- a/src/compiler/scala/tools/nsc/typechecker/Typers.scala
+++ b/src/compiler/scala/tools/nsc/typechecker/Typers.scala
@@ -253,22 +253,23 @@ trait Typers extends Modes with Adaptations with PatMatVirtualiser {
     /** Check that `tpt` refers to a class type with a stable prefix. */
     def checkStablePrefixClassType(tpt: Tree): Boolean = {
       val tpe = unwrapToStableClass(tpt.tpe)
-
       def prefixIsStable = {
         def checkPre = tpe match {
           case TypeRef(pre, _, _) => pre.isStable || errorNotStable(tpt, pre)
-          case _                  => true
+          case _                  => false
         }
         // A type projection like X#Y can get by the stable check if the
         // prefix is singleton-bounded, so peek at the tree too.
        def checkTree = tpt match {
-          case SelectFromTypeTree(qual, _) => isSingleType(qual.tpe) || errorNotStable(tpt, qual.tpe)
+          case SelectFromTypeTree(qual, _) => isSingleType(qual.tpe) || errorNotClass(tpt, tpe)
           case _                           => true
         }
         checkPre && checkTree
       }
-      isNonRefinementClassType(tpe) && (isPastTyper || prefixIsStable)
+      (    (isNonRefinementClassType(tpe) || errorNotClass(tpt, tpe))
+        && (isPastTyper || prefixIsStable)
+      )
     }
 
     /** Check that type tp is not a subtype of itself.
diff --git a/test/files/neg/override-object-flag.check b/test/files/neg/override-object-flag.check
index 152d31ff8a..344165138d 100644
--- a/test/files/neg/override-object-flag.check
+++ b/test/files/neg/override-object-flag.check
@@ -1,4 +1,4 @@
-override-object-flag.scala:3: error: overriding object Foo in trait A of type object B.this.Foo;
+override-object-flag.scala:3: error: overriding object Foo in trait A;
 object Foo cannot override final member
 trait B extends A { override object Foo }
                                     ^
diff --git a/test/files/neg/override-object-no.check b/test/files/neg/override-object-no.check
index f9fb37381b..52bad2b937 100644
--- a/test/files/neg/override-object-no.check
+++ b/test/files/neg/override-object-no.check
@@ -10,7 +10,7 @@ an overriding object must conform to the overridden object's class bound;
  required: Object{def g: Int}
 trait Quux2 extends Quux1 { override object Bar { def g = "abc" } } // err
                                             ^
-override-object-no.scala:25: error: overriding object Bar in trait Quux3 of type object Quux4.this.Bar;
+override-object-no.scala:25: error: overriding object Bar in trait Quux3;
 object Bar cannot override final member
 trait Quux4 extends Quux3 { override object Bar } // err
                                             ^
diff --git a/test/files/neg/t961.check b/test/files/neg/t961.check
index 48273f764d..14d39b0f42 100644
--- a/test/files/neg/t961.check
+++ b/test/files/neg/t961.check
@@ -1,4 +1,4 @@
-t961.scala:11: error: object Temp.B does not take parameters
+t961.scala:11: error: Temp.B.type does not take parameters
       B() match {
       ^
 one error found
diff --git a/test/files/presentation/callcc-interpreter.check b/test/files/presentation/callcc-interpreter.check
index 3385ef12b7..c50e171b4e 100644
--- a/test/files/presentation/callcc-interpreter.check
+++ b/test/files/presentation/callcc-interpreter.check
@@ -52,7 +52,7 @@ retrieved 64 members
 `method wait(x$1: Long, x$2: Int)Unit`
 `method x=> callccInterpreter.type`
 `method →[B](y: B)(callccInterpreter.type, B)`
-`object Wrongobject callccInterpreter.Wrong`
+`object WrongcallccInterpreter.Wrong.type`
 `trait TermcallccInterpreter.Term`
 `trait ValuecallccInterpreter.Value`
 `type AnswercallccInterpreter.Answer`
-- 
cgit v1.2.3
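
Illustrative sketch (an invented example, not part of the patch): the check-file updates above follow from the new module-type printing — per directObjectString, a module reads as "object Foo" when it is the direct object in an error sentence and as its singleton type "Foo.type" elsewhere, which is why t961.check now reads "Temp.B.type does not take parameters". A minimal standalone Scala file showing a module's singleton type; the names Temp2, B and size are assumed here and do not come from the test suite:

    object Temp2 {
      object B                        // B's type is the singleton type Temp2.B.type
      def size(b: B.type): Int = 42   // B is the value of that type
      def ok = size(B)                // compiles: B conforms to B.type
      // def bad = B()                // would not compile; the error is reported against
      //                              // the singleton type Temp2.B.type, as in t961.check
    }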